[hdf4] 01/10: Imported Upstream version 4.2.12
Bas Couwenberg
sebastic at debian.org
Thu Jun 30 23:23:22 UTC 2016
This is an automated email from the git hooks/post-receive script.
sebastic pushed a commit to branch master
in repository hdf4.
commit 61cb233736e931e6df50862ab99b24c8290b578d
Author: Bas Couwenberg <sebastic at xs4all.nl>
Date: Thu Jun 30 19:38:15 2016 +0200
Imported Upstream version 4.2.12
---
CMakeFilters.cmake | 45 +-
CMakeInstallation.cmake | 257 +-
CMakeLists.txt | 197 +-
COPYING | 2 +-
CTestConfig.cmake | 4 +-
MANIFEST | 119 +-
Makefile.am | 8 +-
Makefile.in | 39 +-
README.txt | 59 +-
aclocal.m4 | 13 +
bin/chkmanifest | 48 +-
bin/cmakehdf4 | 4 +-
bin/h4_release_check.sh | 231 ++
bin/h4vers | 2 +-
config/apple | 14 +-
config/cmake/CPack.Info.plist.in | 26 +
config/cmake/ConfigureChecks.cmake | 1 -
config/cmake/FindJNI.cmake | 342 +++
config/cmake/HDF4Macros.cmake | 16 +-
config/cmake/HDF4UseFortran.cmake | 97 +-
config/cmake/HDF4_Examples.cmake.in | 157 +-
config/cmake/README.txt.cmake.in | 29 +-
config/cmake/UseJava.cmake | 1350 +++++++++
config/cmake/UseJavaClassFilelist.cmake | 58 +
config/cmake/UseJavaSymlinks.cmake | 38 +
config/cmake/cacheinit.cmake | 8 +-
config/cmake/h4config.h.in | 6 -
config/cmake/hdf4-config-version.cmake.in | 52 +-
config/cmake/hdf4-config.cmake.build.in | 61 -
config/cmake/hdf4-config.cmake.in | 148 +
config/cmake/hdf4-config.cmake.install.in | 71 -
.../runTest.cmake => cmake/jrunTest.cmake} | 199 +-
config/cmake/libhdf4.settings.cmake.in | 5 +-
config/cmake/patch.xml | 11 +
config/cmake_ext_mod/CPack.Info.plist.in | 32 -
config/cmake_ext_mod/ConfigureChecks.cmake | 152 +-
config/cmake_ext_mod/FindSZIP.cmake | 7 -
config/cmake_ext_mod/HDFLibMacros.cmake | 145 +-
config/cmake_ext_mod/HDFMacros.cmake | 133 +-
config/cmake_ext_mod/HDFTests.c | 44 +-
config/cmake_ext_mod/HDFUseFortran.cmake | 36 +-
config/cmake_ext_mod/grepTest.cmake | 12 +-
config/cmake_ext_mod/prunTest.cmake | 34 +-
config/cmake_ext_mod/runTest.cmake | 86 +-
configure | 1156 ++++++-
configure.ac | 98 +-
hdf/CMakeLists.txt | 2 +-
hdf/Makefile.in | 33 +-
hdf/examples/CMakeLists.txt | 14 +-
hdf/examples/CMakeTests.cmake | 16 +-
hdf/examples/Makefile.am | 4 +-
hdf/examples/Makefile.in | 37 +-
hdf/fortran/CMakeLists.txt | 135 +-
hdf/fortran/CMakeTests.cmake | 18 +-
hdf/fortran/Makefile.in | 33 +-
hdf/fortran/examples/CMakeLists.txt | 40 +-
hdf/fortran/examples/Makefile.am | 2 +-
hdf/fortran/examples/Makefile.in | 35 +-
hdf/src/CMakeLists.txt | 50 +-
hdf/src/H4api_adpt.h | 113 +-
hdf/src/Makefile.in | 33 +-
hdf/src/atom.c | 7 +-
hdf/src/bitvect.c | 6 +-
hdf/src/cdeflate.c | 6 +-
hdf/src/cnbit.c | 6 +-
hdf/src/cnone.c | 6 +-
hdf/src/crle.c | 6 +-
hdf/src/cskphuff.c | 6 +-
hdf/src/cszip.c | 6 +-
hdf/src/df24.c | 6 +-
hdf/src/df24f.c | 6 +-
hdf/src/dfan.c | 6 +-
hdf/src/dfanf.c | 6 +-
hdf/src/dfcomp.c | 6 +-
hdf/src/dfconv.c | 6 +-
hdf/src/dff.c | 6 +-
hdf/src/dfgr.c | 6 +-
hdf/src/dfgroup.c | 6 +-
hdf/src/dfimcomp.c | 6 +-
hdf/src/dfjpeg.c | 6 +-
hdf/src/dfknat.c | 6 +-
hdf/src/dfkswap.c | 7 +-
hdf/src/dfp.c | 6 +-
hdf/src/dfpf.c | 6 +-
hdf/src/dfr8.c | 6 +-
hdf/src/dfr8f.c | 6 +-
hdf/src/dfrle.c | 6 +-
hdf/src/dfsd.c | 6 +-
hdf/src/dfsdf.c | 6 +-
hdf/src/dfstubs.c | 6 +-
hdf/src/dfufp2i.c | 6 +-
hdf/src/dfufp2i.h | 6 +-
hdf/src/dfunjpeg.c | 6 +-
hdf/src/dfutil.c | 6 +-
hdf/src/dfutilf.c | 6 +-
hdf/src/dynarray.c | 6 +-
hdf/src/glist.c | 4 -
hdf/src/hbitio.c | 6 +-
hdf/src/hblocks.c | 6 +-
hdf/src/hbuffer.c | 6 +-
hdf/src/hchunks.c | 6 +-
hdf/src/hcomp.c | 10 +-
hdf/src/hcompri.c | 6 +-
hdf/src/hdfalloc.c | 20 +-
hdf/src/hdfi.h | 10 +-
hdf/src/herr.c | 6 +-
hdf/src/herr.h | 72 +-
hdf/src/herrf.c | 6 +-
hdf/src/hextelt.c | 97 +-
hdf/src/hfile.c | 6 +-
hdf/src/hfile.h | 6 +-
hdf/src/hfiledd.c | 6 +-
hdf/src/hfilef.c | 6 +-
hdf/src/hkit.c | 6 +-
hdf/src/hproto.h | 5 +-
hdf/src/linklist.c | 6 +-
hdf/src/maldebug.c | 6 +-
hdf/src/mcache.c | 6 +-
hdf/src/mfan.c | 6 +-
hdf/src/mfanf.c | 6 +-
hdf/src/mfgr.c | 1430 +++++----
hdf/src/mfgrf.c | 6 +-
hdf/src/mstdio.c | 6 +-
hdf/src/tbbt.c | 6 +-
hdf/src/vattr.c | 6 +-
hdf/src/vattr.h | 6 +-
hdf/src/vattrf.c | 6 +-
hdf/src/vconv.c | 6 +-
hdf/src/vg.c | 4 -
hdf/src/vgf.c | 6 +-
hdf/src/vhi.c | 6 +-
hdf/src/vio.c | 6 +-
hdf/src/vparse.c | 5 -
hdf/src/vrw.c | 6 +-
hdf/src/vsfld.c | 6 +-
hdf/test/CMakeLists.txt | 34 +-
hdf/test/CMakeTests.cmake | 337 ++-
hdf/test/Makefile.in | 33 +-
hdf/test/an.c | 6 +-
hdf/test/anfile.c | 6 +-
hdf/test/bitio.c | 6 +-
hdf/test/blocks.c | 6 +-
hdf/test/buffer.c | 16 +-
hdf/test/chunks.c | 6 +-
hdf/test/comp.c | 6 +-
hdf/test/conv.c | 6 +-
hdf/test/extelt.c | 6 +-
hdf/test/file.c | 6 +-
hdf/test/file1.c | 6 +-
hdf/test/forsupf.c | 6 +-
hdf/test/fortest.c | 7 +-
hdf/test/gentest.c | 6 +-
hdf/test/litend.c | 6 +-
hdf/test/man.c | 6 +-
hdf/test/mgr.c | 6 +-
hdf/test/nbit.c | 6 +-
hdf/test/rig.c | 6 +-
hdf/test/sdmms.c | 6 +-
hdf/test/sdnmms.c | 6 +-
hdf/test/sdstr.c | 6 +-
hdf/test/slab.c | 4 -
hdf/test/tbv.c | 6 +-
hdf/test/testhdf.c | 6 +-
hdf/test/tree.c | 6 +-
hdf/test/tvattr.c | 6 +-
hdf/test/tvset.c | 6 +-
hdf/test/vers.c | 6 +-
hdf/util/CMakeLists.txt | 130 +-
hdf/util/CMakeTests.cmake | 22 +-
hdf/util/Makefile.in | 33 +-
hdf/util/gif2hdf.c | 14 +-
hdf/util/gif2mem.c | 2 +-
hdf/util/h4cc.in | 2 +-
hdf/util/h4fc.in | 2 +-
hdf/util/hdf24to8.c | 6 +-
hdf/util/hdf2jpeg.c | 6 +-
hdf/util/hdf8to24.c | 6 +-
hdf/util/hdfcomp.c | 6 +-
hdf/util/hdfls.c | 6 +-
hdf/util/hdfpack.c | 6 +-
hdf/util/hdftopal.c | 6 +-
hdf/util/hdftor8.c | 6 +-
hdf/util/hdfunpac.c | 6 +-
hdf/util/he_cntrl.c | 6 +-
hdf/util/he_disp.c | 6 +-
hdf/util/he_file.c | 6 +-
hdf/util/he_main.c | 6 +-
hdf/util/jpeg2hdf.c | 6 +-
hdf/util/paltohdf.c | 6 +-
hdf/util/r8tohdf.c | 6 +-
hdf/util/ristosds.c | 6 +-
hdf/util/vmake.c | 6 +-
hdf/util/vshow.c | 6 +-
hdf/util/writehdf.c | 4 +-
java/CMakeLists.txt | 75 +
java/Makefile.am | 16 +
{mfhdf/fortran => java}/Makefile.in | 343 +--
java/examples/CMakeLists.txt | 89 +
java/examples/HDF4DatasetCreate.java | 203 ++
java/examples/HDF4FileCreate.java | 52 +
java/examples/HDF4GroupCreate.java | 108 +
java/examples/Makefile.am | 48 +
{mfhdf/nctest => java/examples}/Makefile.in | 358 +--
java/lib/ext/slf4j-nop-1.7.5.jar | Bin 0 -> 4091 bytes
java/lib/ext/slf4j-simple-1.7.5.jar | Bin 0 -> 10680 bytes
java/lib/hamcrest-core.jar | Bin 0 -> 45024 bytes
java/lib/junit.jar | Bin 0 -> 245039 bytes
java/lib/simplelogger.properties | 36 +
java/lib/slf4j-api-1.7.5.jar | Bin 0 -> 26084 bytes
java/src/CMakeLists.txt | 8 +
java/src/Makefile.am | 73 +
{hdf => java/src}/Makefile.in | 235 +-
java/src/hdf/CMakeLists.txt | 4 +
java/src/hdf/hdflib/CMakeLists.txt | 74 +
java/src/hdf/hdflib/HDFArray.java | 854 ++++++
java/src/hdf/hdflib/HDFChunkInfo.java | 52 +
java/src/hdf/hdflib/HDFCompInfo.java | 33 +
java/src/hdf/hdflib/HDFConstants.java | 419 +++
java/src/hdf/hdflib/HDFDeflateCompInfo.java | 39 +
java/src/hdf/hdflib/HDFException.java | 67 +
java/src/hdf/hdflib/HDFIMCOMPCompInfo.java | 29 +
java/src/hdf/hdflib/HDFJPEGCompInfo.java | 49 +
java/src/hdf/hdflib/HDFJavaException.java | 40 +
java/src/hdf/hdflib/HDFLibrary.java | 3172 ++++++++++++++++++++
java/src/hdf/hdflib/HDFLibraryException.java | 108 +
java/src/hdf/hdflib/HDFNBITChunkInfo.java | 47 +
java/src/hdf/hdflib/HDFNBITCompInfo.java | 47 +
java/src/hdf/hdflib/HDFNativeData.java | 166 +
java/src/hdf/hdflib/HDFNewCompInfo.java | 33 +
.../src/hdf/hdflib/HDFNotImplementedException.java | 44 +
java/src/hdf/hdflib/HDFOldCompInfo.java | 33 +
java/src/hdf/hdflib/HDFOldRLECompInfo.java | 30 +
java/src/hdf/hdflib/HDFRLECompInfo.java | 29 +
java/src/hdf/hdflib/HDFSKPHUFFCompInfo.java | 32 +
java/src/hdf/hdflib/HDFSZIPCompInfo.java | 53 +
java/src/hdf/overview.html | 94 +
java/src/jni/CMakeLists.txt | 68 +
java/src/jni/Makefile.am | 32 +
{mfhdf/xdr => java/src/jni}/Makefile.in | 215 +-
java/src/jni/h4jni.h | 176 ++
java/src/jni/hdfImp.c | 430 +++
java/src/jni/hdfanImp.c | 393 +++
java/src/jni/hdfdfpalImp.c | 221 ++
java/src/jni/hdfdfuImp.c | 44 +
java/src/jni/hdfexceptionImp.c | 186 ++
java/src/jni/hdfgrImp.c | 1044 +++++++
java/src/jni/hdfheImp.c | 52 +
java/src/jni/hdfhxImp.c | 88 +
java/src/jni/hdfnativeImp.c | 1200 ++++++++
java/src/jni/hdfr24Imp.c | 360 +++
java/src/jni/hdfr8Imp.c | 426 +++
java/src/jni/hdfsdsImp.c | 1980 ++++++++++++
java/src/jni/hdfstructsutil.c | 639 ++++
java/src/jni/hdfvdataImp.c | 1077 +++++++
java/src/jni/hdfvfImp.c | 115 +
java/src/jni/hdfvgroupImp.c | 893 ++++++
java/src/jni/hdfvhImp.c | 151 +
java/src/jni/hdfvqImp.c | 58 +
java/src/jni/hdfvsqImp.c | 282 ++
java/test/CMakeLists.txt | 100 +
java/test/JUnit-interface.ert | 2 +
java/test/JUnit-interface.txt | 367 +++
java/test/Makefile.am | 62 +
{mfhdf/nctest => java/test}/Makefile.in | 373 +--
java/test/TestAll.java | 32 +
java/test/TestH4.java | 268 ++
java/test/TestH4ANparams.java | 195 ++
java/test/TestH4DFPparams.java | 110 +
java/test/TestH4DFRparams.java | 139 +
java/test/TestH4DFparams.java | 130 +
java/test/TestH4GRparams.java | 535 ++++
java/test/TestH4HCparams.java | 52 +
java/test/TestH4SDparams.java | 746 +++++
java/test/TestH4VSparams.java | 476 +++
java/test/TestH4Vparams.java | 404 +++
java/test/junit.sh.in | 260 ++
libhdf4.settings.in | 3 +
m4/ax_check_class.m4 | 144 +
m4/ax_check_classpath.m4 | 60 +
m4/ax_check_java_home.m4 | 80 +
m4/ax_check_junit.m4 | 70 +
m4/ax_check_rqrd_class.m4 | 62 +
m4/ax_java_check_class.m4 | 85 +
m4/ax_java_options.m4 | 48 +
m4/ax_jni_include_dir.m4 | 132 +
m4/ax_prog_jar.m4 | 49 +
m4/ax_prog_java.m4 | 115 +
m4/ax_prog_java_cc.m4 | 104 +
m4/ax_prog_java_works.m4 | 134 +
m4/ax_prog_javac.m4 | 79 +
m4/ax_prog_javac_works.m4 | 72 +
m4/ax_prog_javadoc.m4 | 50 +
m4/ax_prog_javah.m4 | 64 +
m4/ax_try_compile_java.m4 | 55 +
m4/ax_try_run_java.m4 | 56 +
man/Makefile.in | 33 +-
man/hdf.1 | 6 +-
mfhdf/CMakeLists.txt | 2 +-
mfhdf/Makefile.in | 33 +-
mfhdf/dumper/CMakeLists.txt | 17 +-
mfhdf/dumper/Makefile.in | 33 +-
mfhdf/dumper/hdp.c | 6 +-
mfhdf/dumper/hdp.h | 6 +-
mfhdf/dumper/hdp_dump.c | 6 +-
mfhdf/dumper/hdp_gr.c | 6 +-
mfhdf/dumper/hdp_list.c | 6 +-
mfhdf/dumper/hdp_rig.c | 6 +-
mfhdf/dumper/hdp_sds.c | 13 +-
mfhdf/dumper/hdp_util.c | 4 -
mfhdf/dumper/hdp_vd.c | 6 +-
mfhdf/dumper/hdp_vg.c | 6 +-
mfhdf/dumper/show.c | 4 -
mfhdf/dumper/testfiles/Roy-64.nc | Bin 0 -> 4021 bytes
mfhdf/dumper/testfiles/dumpsds-19.out | 1 +
mfhdf/dumper/testhdp.sh.in | 5 +-
mfhdf/examples/CMakeLists.txt | 10 +-
mfhdf/examples/CMakeTests.cmake | 8 +-
mfhdf/examples/Makefile.am | 2 +-
mfhdf/examples/Makefile.in | 35 +-
mfhdf/examples/SD_chunking_example.c | 24 +-
mfhdf/examples/SD_find_sds_by_name.c | 2 +-
mfhdf/examples/SD_get_attr.c | 3 +-
mfhdf/examples/SD_mv_sds_to_external.c | 2 +-
mfhdf/examples/SD_read_from_sds.c | 2 +-
mfhdf/fortran/CMakeLists.txt | 131 +-
mfhdf/fortran/CMakeTests.cmake | 32 +-
mfhdf/fortran/Makefile.in | 33 +-
mfhdf/fortran/examples/CMakeLists.txt | 30 +-
mfhdf/fortran/examples/Makefile.am | 2 +-
mfhdf/fortran/examples/Makefile.in | 35 +-
mfhdf/fortran/mfsdf.c | 5 +-
mfhdf/hdfimport/CMakeLists.txt | 17 +-
mfhdf/hdfimport/CMakeTests.cmake | 208 +-
mfhdf/hdfimport/Makefile.in | 33 +-
mfhdf/hdiff/CMakeLists.txt | 17 +-
mfhdf/hdiff/CMakeTests.cmake | 26 +-
mfhdf/hdiff/Makefile.in | 33 +-
mfhdf/hdiff/hdiff_dim.c | 36 +-
mfhdf/hdiff/hdiff_gr.c | 8 +-
mfhdf/hdiff/hdiff_list.c | 76 +-
mfhdf/hdiff/hdiff_mattbl.c | 18 +-
mfhdf/hdiff/hdiff_sds.c | 22 +-
mfhdf/hdiff/hdiff_vs.c | 10 +-
mfhdf/hrepack/CMakeLists.txt | 23 +-
mfhdf/hrepack/CMakeTests.cmake | 165 +-
mfhdf/hrepack/Makefile.in | 33 +-
mfhdf/hrepack/hrepack.c | 12 +-
mfhdf/hrepack/hrepack_an.c | 12 +-
mfhdf/hrepack/hrepack_dim.c | 40 +-
mfhdf/hrepack/hrepack_gr.c | 14 +-
mfhdf/hrepack/hrepack_list.c | 54 +-
mfhdf/hrepack/hrepack_lsttable.c | 14 +-
mfhdf/hrepack/hrepack_opttable.c | 32 +-
mfhdf/hrepack/hrepack_parse.c | 16 +-
mfhdf/hrepack/hrepack_sds.c | 20 +-
mfhdf/hrepack/hrepack_vg.c | 6 +-
mfhdf/hrepack/hrepack_vs.c | 26 +-
mfhdf/hrepack/hrepacktst.c | 32 +-
mfhdf/libsrc/CMakeLists.txt | 67 +-
mfhdf/libsrc/Makefile.in | 33 +-
mfhdf/libsrc/array.c | 25 +-
mfhdf/libsrc/cdf.c | 196 +-
mfhdf/libsrc/hdfnctest.c | 4 -
mfhdf/libsrc/hdfsds.c | 422 ++-
mfhdf/libsrc/local_nc.h | 484 +--
mfhdf/libsrc/mfsd.c | 1070 +++----
mfhdf/libsrc/string.c | 4 +-
mfhdf/libsrc/var.c | 8 +-
mfhdf/ncdump/CMakeLists.txt | 17 +-
mfhdf/ncdump/Makefile.in | 33 +-
mfhdf/ncgen/CMakeLists.txt | 17 +-
mfhdf/ncgen/Makefile.in | 33 +-
mfhdf/nctest/CMakeLists.txt | 12 +-
mfhdf/nctest/Makefile.in | 33 +-
mfhdf/test/CMakeLists.txt | 91 +-
mfhdf/test/CMakeTests.cmake | 269 +-
mfhdf/test/Makefile.am | 2 +-
mfhdf/test/Makefile.in | 44 +-
mfhdf/test/Roy-64.nc | Bin 0 -> 4021 bytes
mfhdf/test/Roy.nc | Bin 0 -> 4020 bytes
mfhdf/test/hdfnctest.c | 4 -
mfhdf/test/hdftest.c | 235 +-
mfhdf/test/hdftest.h | 29 +
mfhdf/test/tcoordvar.c | 2 +-
mfhdf/test/texternal.c | 814 +++++
mfhdf/test/tfile.c | 327 +-
mfhdf/test/tsdsprops.c | 2 +-
mfhdf/test/tunlim.c | 2 +-
mfhdf/test/tutils.c | 315 +-
mfhdf/xdr/CMakeLists.txt | 58 +-
mfhdf/xdr/Makefile.in | 33 +-
release_notes/HISTORY.txt | 352 +++
release_notes/INSTALL | 4 +-
release_notes/INSTALL_CMake.txt | 346 +--
release_notes/INSTALL_CYGWIN.txt | 4 +-
release_notes/RELEASE.txt | 344 +--
release_notes/USING_CMake_Examples.txt | 84 +-
release_notes/USING_HDF4_CMake.txt | 172 +-
release_notes/USING_HDF4_VS.txt | 4 +-
release_notes/misc_docs.txt | 12 +-
400 files changed, 33114 insertions(+), 6656 deletions(-)
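
Most of the diff below reworks the CMake packaging: the filter find_package() calls gain explicit "static"/"shared" COMPONENTS, the hdf4-config files are now generated with CMakePackageConfigHelpers, and each library target is split into "-static"/"-shared" variants. For orientation only, a downstream project built against this release might locate the installed package roughly as in the sketch below; the exported target names used here (hdf-static, mfhdf-static) are assumptions, not taken from this patch, so check the installed hdf4-config.cmake for the real ones.

    # Hypothetical consumer CMakeLists.txt (target names are assumptions)
    cmake_minimum_required (VERSION 3.1.0)
    project (hdf4_consumer C)

    # Config-mode lookup of the package files produced by CMakeInstallation.cmake
    find_package (HDF4 NAMES hdf4 COMPONENTS static REQUIRED)

    add_executable (reader reader.c)
    # Assumed exported target names; verify against the installed config file
    target_link_libraries (reader hdf-static mfhdf-static)
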
diff --git a/CMakeFilters.cmake b/CMakeFilters.cmake
index 0fd994f..0114f7f 100644
--- a/CMakeFilters.cmake
+++ b/CMakeFilters.cmake
@@ -36,9 +36,13 @@ option (HDF4_ENABLE_JPEG_LIB_SUPPORT "Enable libjpeg" ON)
if (HDF4_ENABLE_JPEG_LIB_SUPPORT)
if (NOT H4_JPEGLIB_HEADER)
if (NOT JPEG_USE_EXTERNAL)
- find_package (JPEG NAMES ${JPEG_PACKAGE_NAME}${HDF_PACKAGE_EXT})
+ find_package (JPEG NAMES ${JPEG_PACKAGE_NAME}${HDF_PACKAGE_EXT} COMPONENTS static shared)
if (NOT JPEG_FOUND)
find_package (JPEG) # Legacy find
+ if (JPEG_FOUND)
+ set (LINK_LIBS ${LINK_LIBS} ${JPEG_LIBRARIES})
+ set (LINK_SHARED_LIBS ${LINK_SHARED_LIBS} ${JPEG_LIBRARIES})
+ endif (JPEG_FOUND)
endif (NOT JPEG_FOUND)
endif (NOT JPEG_USE_EXTERNAL)
if (JPEG_FOUND)
@@ -46,7 +50,7 @@ if (HDF4_ENABLE_JPEG_LIB_SUPPORT)
set (H4_HAVE_LIBJPEG 1)
set (H4_JPEGLIB_HEADER "jpeglib.h")
set (JPEG_INCLUDE_DIR_GEN ${JPEG_INCLUDE_DIR})
- set (JPEG_INCLUDE_DIRS ${JPEG_INCLUDE_DIR})
+ set (JPEG_INCLUDE_DIRS ${JPEG_INCLUDE_DIRS} ${JPEG_INCLUDE_DIR})
else (JPEG_FOUND)
if (HDF4_ALLOW_EXTERNAL_SUPPORT MATCHES "SVN" OR HDF4_ALLOW_EXTERNAL_SUPPORT MATCHES "TGZ")
# May need to build JPEG with PIC on x64 machines with gcc
@@ -61,7 +65,7 @@ if (HDF4_ENABLE_JPEG_LIB_SUPPORT)
endif (DEFINED CMAKE_ANSI_CFLAGS)
endif (BUILD_JPEG_WITH_PIC)
- EXTERNAL_JPEG_LIBRARY (${HDF4_ALLOW_EXTERNAL_SUPPORT} ${LIB_TYPE} ${JPEG_CMAKE_C_FLAGS})
+ EXTERNAL_JPEG_LIBRARY (${HDF4_ALLOW_EXTERNAL_SUPPORT} ${JPEG_CMAKE_C_FLAGS})
set (H4_HAVE_JPEGLIB_H 1)
set (H4_HAVE_LIBJPEG 1)
set (H4_JPEGLIB_HEADER "jpeglib.h")
@@ -74,7 +78,10 @@ if (HDF4_ENABLE_JPEG_LIB_SUPPORT)
# This project is being called from within another and JPEGLib is already configured
set (H4_HAVE_JPEGLIB_H 1)
endif (NOT H4_JPEGLIB_HEADER)
- set (LINK_LIBS ${LINK_LIBS} ${JPEG_LIBRARY})
+ if (BUILD_SHARED_LIBS)
+ set (LINK_SHARED_LIBS ${LINK_SHARED_LIBS} ${JPEG_SHARED_LIBRARY})
+ endif (BUILD_SHARED_LIBS)
+ set (LINK_LIBS ${LINK_LIBS} ${JPEG_STATIC_LIBRARY})
INCLUDE_DIRECTORIES (${JPEG_INCLUDE_DIRS})
message (STATUS "JPEGLIB is ON")
endif (HDF4_ENABLE_JPEG_LIB_SUPPORT)
@@ -86,9 +93,13 @@ option (HDF4_ENABLE_Z_LIB_SUPPORT "Enable Zlib Filters" ON)
if (HDF4_ENABLE_Z_LIB_SUPPORT)
if (NOT H4_ZLIB_HEADER)
if (NOT ZLIB_USE_EXTERNAL)
- find_package (ZLIB NAMES ${ZLIB_PACKAGE_NAME}${HDF_PACKAGE_EXT})
+ find_package (ZLIB NAMES ${ZLIB_PACKAGE_NAME}${HDF_PACKAGE_EXT} COMPONENTS static shared)
if (NOT ZLIB_FOUND)
find_package (ZLIB) # Legacy find
+ if (ZLIB_FOUND)
+ set (LINK_LIBS ${LINK_LIBS} ${ZLIB_LIBRARIES})
+ set (LINK_SHARED_LIBS ${LINK_SHARED_LIBS} ${ZLIB_LIBRARIES})
+ endif (ZLIB_FOUND)
endif (NOT ZLIB_FOUND)
endif (NOT ZLIB_USE_EXTERNAL)
if (ZLIB_FOUND)
@@ -97,10 +108,10 @@ if (HDF4_ENABLE_Z_LIB_SUPPORT)
set (H4_HAVE_LIBZ 1)
set (H4_ZLIB_HEADER "zlib.h")
set (ZLIB_INCLUDE_DIR_GEN ${ZLIB_INCLUDE_DIR})
- set (ZLIB_INCLUDE_DIRS ${ZLIB_INCLUDE_DIR})
+ set (ZLIB_INCLUDE_DIRS ${ZLIB_INCLUDE_DIRS} ${ZLIB_INCLUDE_DIR})
else (ZLIB_FOUND)
if (HDF4_ALLOW_EXTERNAL_SUPPORT MATCHES "SVN" OR HDF4_ALLOW_EXTERNAL_SUPPORT MATCHES "TGZ")
- EXTERNAL_ZLIB_LIBRARY (${HDF4_ALLOW_EXTERNAL_SUPPORT} ${LIB_TYPE})
+ EXTERNAL_ZLIB_LIBRARY (${HDF4_ALLOW_EXTERNAL_SUPPORT})
set (H4_HAVE_FILTER_DEFLATE 1)
set (H4_HAVE_ZLIB_H 1)
set (H4_HAVE_LIBZ 1)
@@ -115,7 +126,10 @@ if (HDF4_ENABLE_Z_LIB_SUPPORT)
set (H4_HAVE_ZLIB_H 1)
set (H4_HAVE_LIBZ 1)
endif (NOT H4_ZLIB_HEADER)
- set (LINK_LIBS ${LINK_LIBS} ${ZLIB_LIBRARIES})
+ if (BUILD_SHARED_LIBS)
+ set (LINK_SHARED_LIBS ${LINK_SHARED_LIBS} ${ZLIB_SHARED_LIBRARY})
+ endif (BUILD_SHARED_LIBS)
+ set (LINK_LIBS ${LINK_LIBS} ${ZLIB_STATIC_LIBRARY})
INCLUDE_DIRECTORIES (${ZLIB_INCLUDE_DIRS})
message (STATUS "Filter ZLIB is ON")
endif (HDF4_ENABLE_Z_LIB_SUPPORT)
@@ -128,9 +142,13 @@ set (SZIP_INFO "disabled")
if (HDF4_ENABLE_SZIP_SUPPORT)
option (HDF4_ENABLE_SZIP_ENCODING "Use SZip Encoding" OFF)
if (NOT SZIP_USE_EXTERNAL)
- find_package (SZIP NAMES ${SZIP_PACKAGE_NAME}${HDF_PACKAGE_EXT})
+ find_package (SZIP NAMES ${SZIP_PACKAGE_NAME}${HDF_PACKAGE_EXT} COMPONENTS static shared)
if (NOT SZIP_FOUND)
find_package (SZIP) # Legacy find
+ if (SZIP_FOUND)
+ set (LINK_LIBS ${LINK_LIBS} ${SZIP_LIBRARIES})
+ set (LINK_SHARED_LIBS ${LINK_SHARED_LIBS} ${SZIP_LIBRARIES})
+ endif (SZIP_FOUND)
endif (NOT SZIP_FOUND)
endif (NOT SZIP_USE_EXTERNAL)
if (SZIP_FOUND)
@@ -138,10 +156,10 @@ if (HDF4_ENABLE_SZIP_SUPPORT)
set (H4_HAVE_SZLIB_H 1)
set (H4_HAVE_LIBSZ 1)
set (SZIP_INCLUDE_DIR_GEN ${SZIP_INCLUDE_DIR})
- set (SZIP_INCLUDE_DIRS ${SZIP_INCLUDE_DIR})
+ set (SZIP_INCLUDE_DIRS ${SZIP_INCLUDE_DIRS} ${SZIP_INCLUDE_DIR})
else (SZIP_FOUND)
if (HDF4_ALLOW_EXTERNAL_SUPPORT MATCHES "SVN" OR HDF4_ALLOW_EXTERNAL_SUPPORT MATCHES "TGZ")
- EXTERNAL_SZIP_LIBRARY (${HDF4_ALLOW_EXTERNAL_SUPPORT} ${LIB_TYPE} ${HDF4_ENABLE_SZIP_ENCODING})
+ EXTERNAL_SZIP_LIBRARY (${HDF4_ALLOW_EXTERNAL_SUPPORT} ${HDF4_ENABLE_SZIP_ENCODING})
set (H4_HAVE_FILTER_SZIP 1)
set (H4_HAVE_SZLIB_H 1)
set (H4_HAVE_LIBSZ 1)
@@ -150,7 +168,10 @@ if (HDF4_ENABLE_SZIP_SUPPORT)
message (FATAL_ERROR "SZIP is Required for SZIP support in HDF4")
endif (HDF4_ALLOW_EXTERNAL_SUPPORT MATCHES "SVN" OR HDF4_ALLOW_EXTERNAL_SUPPORT MATCHES "TGZ")
endif (SZIP_FOUND)
- set (LINK_LIBS ${LINK_LIBS} ${SZIP_LIBRARIES})
+ if (BUILD_SHARED_LIBS)
+ set (LINK_SHARED_LIBS ${LINK_SHARED_LIBS} ${SZIP_SHARED_LIBRARY})
+ endif (BUILD_SHARED_LIBS)
+ set (LINK_LIBS ${LINK_LIBS} ${SZIP_STATIC_LIBRARY})
INCLUDE_DIRECTORIES (${SZIP_INCLUDE_DIRS})
message (STATUS "Filter SZIP is ON")
if (HDF4_ENABLE_SZIP_ENCODING)
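
The hunks above apply the same pattern to JPEG, ZLIB and SZIP: try a config-mode find_package() with static/shared components, fall back to the legacy find module, and only build the library externally when HDF4_ALLOW_EXTERNAL_SUPPORT is SVN or TGZ. A hypothetical cache preload file (passed with cmake -C) that drives these options could look like the sketch below; the option names are the ones visible in the hunks above, the values are examples only.

    # preload.cmake (example values, not part of this patch)
    set (HDF4_ENABLE_JPEG_LIB_SUPPORT ON CACHE BOOL "" FORCE)
    set (HDF4_ENABLE_Z_LIB_SUPPORT ON CACHE BOOL "" FORCE)
    set (HDF4_ENABLE_SZIP_SUPPORT OFF CACHE BOOL "" FORCE)
    # Build zlib/jpeg from external tarballs instead of using system copies
    set (HDF4_ALLOW_EXTERNAL_SUPPORT "TGZ" CACHE STRING "" FORCE)
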
diff --git a/CMakeInstallation.cmake b/CMakeInstallation.cmake
index 8d3e4f8..20113e1 100644
--- a/CMakeInstallation.cmake
+++ b/CMakeInstallation.cmake
@@ -1,3 +1,4 @@
+include (CMakePackageConfigHelpers)
#-----------------------------------------------------------------------------
# Check for Installation Utilities
@@ -28,7 +29,7 @@ endif (NOT HDF4_INSTALL_NO_DEVELOPMENT)
if (NOT HDF4_EXTERNALLY_CONFIGURED)
install (
EXPORT ${HDF4_EXPORTED_TARGETS}
- DESTINATION ${HDF4_INSTALL_CMAKE_DIR}/${HDF4_PACKAGE}
+ DESTINATION ${HDF4_INSTALL_CMAKE_DIR}
FILE ${HDF4_PACKAGE}${HDF_PACKAGE_EXT}-targets.cmake
COMPONENT configinstall
)
@@ -45,7 +46,7 @@ if (NOT HDF4_EXTERNALLY_CONFIGURED)
endif (NOT HDF4_EXTERNALLY_CONFIGURED)
#-----------------------------------------------------------------------------
-# Configure the hdf4-config.cmake file for the build directory
+# Set includes needed for build
#-----------------------------------------------------------------------------
set (HDF4_INCLUDES_BUILD_TIME
${HDF4_HDF_SOURCE_DIR}
@@ -53,13 +54,26 @@ set (HDF4_INCLUDES_BUILD_TIME
${HDF4_MFHDF_XDR_DIR}
${HDF4_BINARY_DIR}
)
+
+#-----------------------------------------------------------------------------
+# Set variables needed for installation
+#-----------------------------------------------------------------------------
set (HDF4_VERSION_STRING ${HDF4_PACKAGE_VERSION})
set (HDF4_VERSION_MAJOR ${HDF4_PACKAGE_VERSION_MAJOR})
set (HDF4_VERSION_MINOR ${HDF4_PACKAGE_VERSION_MINOR})
-configure_file (
- ${HDF_RESOURCES_DIR}/hdf4-config.cmake.build.in
- ${HDF4_BINARY_DIR}/${HDF4_PACKAGE}${HDF_PACKAGE_EXT}-config.cmake @ONLY
+#-----------------------------------------------------------------------------
+# Configure the hdf4-config.cmake file for the build directory
+#-----------------------------------------------------------------------------
+set (INCLUDE_INSTALL_DIR ${HDF4_INSTALL_INCLUDE_DIR})
+set (SHARE_INSTALL_DIR "${CMAKE_CURRENT_BINARY_DIR}/${HDF4_INSTALL_CMAKE_DIR}" )
+set (CURRENT_BUILD_DIR "${CMAKE_CURRENT_BINARY_DIR}" )
+configure_package_config_file (
+ ${HDF_RESOURCES_DIR}/hdf4-config.cmake.in
+ "${HDF4_BINARY_DIR}/${HDF4_PACKAGE}${HDF_PACKAGE_EXT}-config.cmake"
+ INSTALL_DESTINATION "${HDF4_INSTALL_CMAKE_DIR}"
+ PATH_VARS INCLUDE_INSTALL_DIR SHARE_INSTALL_DIR CURRENT_BUILD_DIR
+ INSTALL_PREFIX "${CMAKE_CURRENT_BINARY_DIR}"
)
#-----------------------------------------------------------------------------
@@ -67,28 +81,32 @@ configure_file (
#-----------------------------------------------------------------------------
if (NOT HDF4_EXTERNALLY_CONFIGURED)
configure_file (
- ${HDF_RESOURCES_DIR}/FindHDF4.cmake.in
- ${HDF4_BINARY_DIR}/CMakeFiles/FindHDF4${HDF_PACKAGE_EXT}.cmake @ONLY
+ ${HDF_RESOURCES_DIR}/FindHDF4.cmake.in
+ ${HDF4_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/FindHDF4${HDF_PACKAGE_EXT}.cmake @ONLY
)
install (
- FILES ${HDF4_BINARY_DIR}/CMakeFiles/FindHDF4${HDF_PACKAGE_EXT}.cmake
- DESTINATION ${HDF4_INSTALL_CMAKE_DIR}/${HDF4_PACKAGE}
+ FILES ${HDF4_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/FindHDF4${HDF_PACKAGE_EXT}.cmake
+ DESTINATION ${HDF4_INSTALL_CMAKE_DIR}
COMPONENT configinstall
)
endif (NOT HDF4_EXTERNALLY_CONFIGURED)
-
#-----------------------------------------------------------------------------
# Configure the hdf4-config.cmake file for the install directory
#-----------------------------------------------------------------------------
+set (INCLUDE_INSTALL_DIR ${HDF4_INSTALL_INCLUDE_DIR})
+set (SHARE_INSTALL_DIR "${CMAKE_INSTALL_PREFIX}/${HDF4_INSTALL_CMAKE_DIR}" )
+set (CURRENT_BUILD_DIR "${CMAKE_INSTALL_PREFIX}" )
+configure_package_config_file (
+ ${HDF_RESOURCES_DIR}/hdf4-config.cmake.in
+ "${HDF4_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/${HDF4_PACKAGE}${HDF_PACKAGE_EXT}-config.cmake"
+ INSTALL_DESTINATION "${HDF4_INSTALL_CMAKE_DIR}"
+ PATH_VARS INCLUDE_INSTALL_DIR SHARE_INSTALL_DIR CURRENT_BUILD_DIR
+)
if (NOT HDF4_EXTERNALLY_CONFIGURED)
- configure_file (
- ${HDF_RESOURCES_DIR}/hdf4-config.cmake.install.in
- ${HDF4_BINARY_DIR}/CMakeFiles/${HDF4_PACKAGE}${HDF_PACKAGE_EXT}-config.cmake @ONLY
- )
install (
- FILES ${HDF4_BINARY_DIR}/CMakeFiles/${HDF4_PACKAGE}${HDF_PACKAGE_EXT}-config.cmake
- DESTINATION ${HDF4_INSTALL_CMAKE_DIR}/${HDF4_PACKAGE}
+ FILES ${HDF4_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/${HDF4_PACKAGE}${HDF_PACKAGE_EXT}-config.cmake
+ DESTINATION ${HDF4_INSTALL_CMAKE_DIR}
COMPONENT configinstall
)
endif (NOT HDF4_EXTERNALLY_CONFIGURED)
@@ -99,11 +117,11 @@ endif (NOT HDF4_EXTERNALLY_CONFIGURED)
if (NOT HDF4_EXTERNALLY_CONFIGURED)
configure_file (
${HDF_RESOURCES_DIR}/hdf4-config-version.cmake.in
- ${HDF4_BINARY_DIR}/CMakeFiles/${HDF4_PACKAGE}${HDF_PACKAGE_EXT}-config-version.cmake @ONLY
+ ${HDF4_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/${HDF4_PACKAGE}${HDF_PACKAGE_EXT}-config-version.cmake @ONLY
)
install (
- FILES ${HDF4_BINARY_DIR}/CMakeFiles/${HDF4_PACKAGE}${HDF_PACKAGE_EXT}-config-version.cmake
- DESTINATION ${HDF4_INSTALL_CMAKE_DIR}/${HDF4_PACKAGE}
+ FILES ${HDF4_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/${HDF4_PACKAGE}${HDF_PACKAGE_EXT}-config-version.cmake
+ DESTINATION ${HDF4_INSTALL_CMAKE_DIR}
COMPONENT configinstall
)
endif (NOT HDF4_EXTERNALLY_CONFIGURED)
@@ -117,12 +135,12 @@ else (H4_WORDS_BIGENDIAN)
set (BYTESEX little-endian)
endif (H4_WORDS_BIGENDIAN)
configure_file (
- ${HDF_RESOURCES_DIR}/libhdf4.settings.cmake.in
+ ${HDF_RESOURCES_DIR}/libhdf4.settings.cmake.in
${HDF4_BINARY_DIR}/libhdf4.settings @ONLY
)
install (
FILES ${HDF4_BINARY_DIR}/libhdf4.settings
- DESTINATION ${HDF4_INSTALL_CMAKE_DIR}/${HDF4_PACKAGE}
+ DESTINATION ${HDF4_INSTALL_LIB_DIR}
COMPONENT libraries
)
@@ -132,7 +150,7 @@ install (
option (HDF4_PACK_EXAMPLES "Package the HDF4 Library Examples Compressed File" OFF)
if (HDF4_PACK_EXAMPLES)
configure_file (
- ${HDF_RESOURCES_DIR}/HDF4_Examples.cmake.in
+ ${HDF_RESOURCES_DIR}/HDF4_Examples.cmake.in
${HDF4_BINARY_DIR}/HDF4_Examples.cmake @ONLY
)
install (
@@ -141,9 +159,17 @@ if (HDF4_PACK_EXAMPLES)
COMPONENT hdfdocuments
)
if (EXISTS "${HDF4_EXAMPLES_COMPRESSED_DIR}/${HDF4_EXAMPLES_COMPRESSED}")
+ execute_process(
+ COMMAND ${CMAKE_COMMAND} -E tar xzf ${HDF4_EXAMPLES_COMPRESSED_DIR}/${HDF4_EXAMPLES_COMPRESSED}
+ )
+ install (
+ DIRECTORY ${HDF4_BINARY_DIR}/HDF4Examples
+ DESTINATION ${HDF4_INSTALL_DATA_DIR}
+ USE_SOURCE_PERMISSIONS
+ COMPONENT hdfdocuments
+ )
install (
FILES
- ${HDF4_EXAMPLES_COMPRESSED_DIR}/${HDF4_EXAMPLES_COMPRESSED}
${HDF4_SOURCE_DIR}/release_notes/USING_CMake_Examples.txt
DESTINATION ${HDF4_INSTALL_DATA_DIR}
COMPONENT hdfdocuments
@@ -245,16 +271,20 @@ if (NOT HDF4_EXTERNALLY_CONFIGURED AND NOT HDF4_NO_PACKAGES)
set (CPACK_RESOURCE_FILE_README "${CMAKE_CURRENT_SOURCE_DIR}/release_notes/RELEASE.txt")
endif (EXISTS "${HDF4_SOURCE_DIR}/release_notes")
set (CPACK_PACKAGE_RELOCATABLE TRUE)
- set (CPACK_PACKAGE_INSTALL_DIRECTORY "${CPACK_PACKAGE_VENDOR}/${CPACK_PACKAGE_NAME}/${CPACK_PACKAGE_VERSION}")
+ if (OVERRIDE_INSTALL_VERSION)
+ set (CPACK_PACKAGE_INSTALL_DIRECTORY "${CPACK_PACKAGE_VENDOR}/${CPACK_PACKAGE_NAME}/${OVERRIDE_INSTALL_VERSION}")
+ else (OVERRIDE_INSTALL_VERSION)
+ set (CPACK_PACKAGE_INSTALL_DIRECTORY "${CPACK_PACKAGE_VENDOR}/${CPACK_PACKAGE_NAME}/${CPACK_PACKAGE_VERSION}")
+ endif (OVERRIDE_INSTALL_VERSION)
set (CPACK_PACKAGE_ICON "${HDF_RESOURCES_EXT_DIR}/hdf.bmp")
- set (CPACK_GENERATOR "TGZ")
+ set (CPACK_GENERATOR "TGZ")
if (WIN32)
- set (CPACK_GENERATOR "ZIP")
+ set (CPACK_GENERATOR "ZIP")
- if (NSIS_EXECUTABLE)
- list (APPEND CPACK_GENERATOR "NSIS")
- endif (NSIS_EXECUTABLE)
+ if (NSIS_EXECUTABLE)
+ list (APPEND CPACK_GENERATOR "NSIS")
+ endif (NSIS_EXECUTABLE)
# Installers for 32- vs. 64-bit CMake:
# - Root install directory (displayed to end user at installer-run time)
# - "NSIS package/display name" (text used in the installer GUI)
@@ -274,14 +304,17 @@ if (NOT HDF4_EXTERNALLY_CONFIGURED AND NOT HDF4_NO_PACKAGES)
# set the package header icon for MUI
set (CPACK_PACKAGE_ICON "${HDF_RESOURCES_EXT_DIR}\\\\hdf.bmp")
set (CPACK_NSIS_DISPLAY_NAME "${CPACK_NSIS_PACKAGE_NAME}")
- set (CPACK_PACKAGE_INSTALL_DIRECTORY "${CPACK_PACKAGE_VENDOR}\\\\${CPACK_PACKAGE_NAME}\\\\${CPACK_PACKAGE_VERSION}")
- set (CPACK_MONOLITHIC_INSTALL ON)
+ if (OVERRIDE_INSTALL_VERSION)
+ set (CPACK_PACKAGE_INSTALL_DIRECTORY "${CPACK_PACKAGE_VENDOR}\\\\${CPACK_PACKAGE_NAME}\\\\${OVERRIDE_INSTALL_VERSION}")
+ else (OVERRIDE_INSTALL_VERSION)
+ set (CPACK_PACKAGE_INSTALL_DIRECTORY "${CPACK_PACKAGE_VENDOR}\\\\${CPACK_PACKAGE_NAME}\\\\${CPACK_PACKAGE_VERSION}")
+ endif (OVERRIDE_INSTALL_VERSION)
set (CPACK_NSIS_CONTACT "${HDF4_PACKAGE_BUGREPORT}")
set (CPACK_NSIS_MODIFY_PATH ON)
-
- if (WIX_EXECUTABLE)
- list (APPEND CPACK_GENERATOR "WIX")
- endif (WIX_EXECUTABLE)
+
+ if (WIX_EXECUTABLE)
+ list (APPEND CPACK_GENERATOR "WIX")
+ endif (WIX_EXECUTABLE)
#WiX variables
set (CPACK_WIX_UNINSTALL "1")
# .. variable:: CPACK_WIX_LICENSE_RTF
@@ -319,30 +352,31 @@ if (NOT HDF4_EXTERNALLY_CONFIGURED AND NOT HDF4_NO_PACKAGES)
#
# This image must be 493 by 312 pixels.
#
+ set(CPACK_WIX_PROPERTY_ARPCOMMENTS "Hierarchical Data Format (HDF) Software Library and Utilities")
+ set(CPACK_WIX_PROPERTY_ARPURLINFOABOUT "${HDF4_PACKAGE_URL}")
+ set(CPACK_WIX_PROPERTY_ARPHELPLINK "${HDF4_PACKAGE_BUGREPORT}")
+ if (BUILD_SHARED_LIBS)
+ set(CPACK_WIX_PATCH_FILE "${HDF_RESOURCES_DIR}/patch.xml")
+ endif (BUILD_SHARED_LIBS)
elseif (APPLE)
- list (APPEND CPACK_GENERATOR "DragNDrop")
+ list (APPEND CPACK_GENERATOR "DragNDrop")
set (CPACK_COMPONENTS_ALL_IN_ONE_PACKAGE ON)
set (CPACK_PACKAGING_INSTALL_PREFIX "/${CPACK_PACKAGE_INSTALL_DIRECTORY}")
set (CPACK_PACKAGE_ICON "${HDF_RESOURCES_EXT_DIR}/hdf.icns")
- if (HDF4_PACK_MACOSX_BUNDLE)
- list (APPEND CPACK_GENERATOR "Bundle")
+ option (HDF4_PACK_MACOSX_FRAMEWORK "Package the HDF Library in a Framework" OFF)
+ if (HDF4_PACK_MACOSX_FRAMEWORK AND HDF4_BUILD_FRAMEWORKS)
set (CPACK_BUNDLE_NAME "${HDF4_PACKAGE_STRING}")
set (CPACK_BUNDLE_LOCATION "/") # make sure CMAKE_INSTALL_PREFIX ends in /
set (CMAKE_INSTALL_PREFIX "/${CPACK_BUNDLE_NAME}.framework/Versions/${CPACK_PACKAGE_VERSION}/${CPACK_PACKAGE_NAME}/")
set (CPACK_BUNDLE_ICON "${HDF_RESOURCES_EXT_DIR}/hdf.icns")
set (CPACK_BUNDLE_PLIST "${HDF4_BINARY_DIR}/CMakeFiles/Info.plist")
- set (CPACK_APPLE_GUI_INFO_STRING "Hierarchical Data Format (HDF) Software Library and Utilities")
- set (CPACK_APPLE_GUI_COPYRIGHT "Copyright © 2006-2014 by The HDF Group. All rights reserved.")
set (CPACK_SHORT_VERSION_STRING "${CPACK_PACKAGE_VERSION}")
- set (CPACK_APPLE_GUI_BUNDLE_NAME "${HDF4_PACKAGE_STRING}")
- set (CPACK_APPLE_GUI_VERSION_STRING "${CPACK_PACKAGE_VERSION_STRING}")
- set (CPACK_APPLE_GUI_SHORT_VERSION_STRING "${CPACK_PACKAGE_VERSION}")
#-----------------------------------------------------------------------------
# Configure the Info.plist file for the install bundle
#-----------------------------------------------------------------------------
configure_file (
- ${HDF_RESOURCES_EXT_DIR}/CPack.Info.plist.in
+ ${HDF_RESOURCES_DIR}/CPack.Info.plist.in
${HDF4_BINARY_DIR}/CMakeFiles/Info.plist @ONLY
)
configure_file (
@@ -355,25 +389,49 @@ if (NOT HDF4_EXTERNALLY_CONFIGURED AND NOT HDF4_NO_PACKAGES)
)
install (
FILES ${HDF4_BINARY_DIR}/CMakeFiles/PkgInfo
- ${HDF4_BINARY_DIR}/CMakeFiles/version.plist
DESTINATION ..
)
- endif (HDF4_PACK_MACOSX_BUNDLE)
+ endif (HDF4_PACK_MACOSX_FRAMEWORK AND HDF4_BUILD_FRAMEWORKS)
else (WIN32)
- list (APPEND CPACK_GENERATOR "STGZ")
+ list (APPEND CPACK_GENERATOR "STGZ")
set (CPACK_PACKAGING_INSTALL_PREFIX "/${CPACK_PACKAGE_INSTALL_DIRECTORY}")
set (CPACK_COMPONENTS_ALL_IN_ONE_PACKAGE ON)
set (CPACK_DEBIAN_PACKAGE_SECTION "Libraries")
set (CPACK_DEBIAN_PACKAGE_MAINTAINER "${HDF4_PACKAGE_BUGREPORT}")
-
+
+# list (APPEND CPACK_GENERATOR "RPM")
+ set (CPACK_RPM_PACKAGE_RELEASE "1")
set (CPACK_RPM_COMPONENT_INSTALL ON)
set (CPACK_RPM_PACKAGE_RELOCATABLE ON)
set (CPACK_RPM_PACKAGE_LICENSE "BSD-style")
set (CPACK_RPM_PACKAGE_GROUP "Development/Libraries")
set (CPACK_RPM_PACKAGE_URL "${HDF4_PACKAGE_URL}")
+ set (CPACK_RPM_PACKAGE_SUMMARY "HDF is a unique technology suite that makes possible the management of extremely large and complex data collections.")
+ set (CPACK_RPM_PACKAGE_DESCRIPTION
+ "The HDF technology suite includes:
+
+ * A versatile data model that can represent very complex data objects and a wide variety of metadata.
+
+ * A completely portable file format with no limit on the number or size of data objects in the collection.
+
+ * A software library that runs on a range of computational platforms, from laptops to massively parallel systems, and implements a high-level API with C, C++, Fortran 90, and Java interfaces.
+
+ * A rich set of integrated performance features that allow for access time and storage space optimizations.
+
+ * Tools and applications for managing, manipulating, viewing, and analyzing the data in the collection.
+
+The HDF data model, file format, API, library, and tools are open and distributed without charge.
+"
+ )
+
+ #-----------------------------------------------------------------------------
+ # Configure the spec file for the install RPM
+ #-----------------------------------------------------------------------------
+# configure_file ("${HDF5_RESOURCES_DIR}/hdf5.spec.in" "${CMAKE_CURRENT_BINARY_DIR}/${HDF5_PACKAGE_NAME}.spec" @ONLY IMMEDIATE)
+# set (CPACK_RPM_USER_BINARY_SPECFILE "${CMAKE_CURRENT_BINARY_DIR}/${HDF5_PACKAGE_NAME}.spec")
endif (WIN32)
-
+
# By default, do not warn when built on machines using only VS Express:
if (NOT DEFINED CMAKE_INSTALL_SYSTEM_RUNTIME_LIBS_NO_WARNINGS)
set (CMAKE_INSTALL_SYSTEM_RUNTIME_LIBS_NO_WARNINGS ON)
@@ -381,88 +439,113 @@ if (NOT HDF4_EXTERNALLY_CONFIGURED AND NOT HDF4_NO_PACKAGES)
include (InstallRequiredSystemLibraries)
set (CPACK_INSTALL_CMAKE_PROJECTS "${HDF4_BINARY_DIR};HDF4;ALL;/")
-
+
if (HDF4_PACKAGE_EXTLIBS)
if (HDF4_ALLOW_EXTERNAL_SUPPORT MATCHES "SVN" OR HDF4_ALLOW_EXTERNAL_SUPPORT MATCHES "TGZ")
if (JPEG_FOUND AND JPEG_USE_EXTERNAL)
- set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${JPEG_INCLUDE_DIR_GEN};JPEG;libraries;/")
- set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${JPEG_INCLUDE_DIR_GEN};JPEG;headers;/")
- set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${JPEG_INCLUDE_DIR_GEN};JPEG;configinstall;/")
+ if (WIN32)
+ set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${JPEG_INCLUDE_DIR_GEN};JPEG;ALL;/")
+ else (WIN32)
+ set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${JPEG_INCLUDE_DIR_GEN};JPEG;libraries;/")
+ set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${JPEG_INCLUDE_DIR_GEN};JPEG;headers;/")
+ set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${JPEG_INCLUDE_DIR_GEN};JPEG;configinstall;/")
+ endif (WIN32)
endif (JPEG_FOUND AND JPEG_USE_EXTERNAL)
if (ZLIB_FOUND AND ZLIB_USE_EXTERNAL)
- set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${ZLIB_INCLUDE_DIR_GEN};ZLIB;libraries;/")
- set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${ZLIB_INCLUDE_DIR_GEN};ZLIB;headers;/")
- set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${ZLIB_INCLUDE_DIR_GEN};ZLIB;configinstall;/")
+ if (WIN32)
+ set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${ZLIB_INCLUDE_DIR_GEN};ZLIB;ALL;/")
+ else (WIN32)
+ set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${ZLIB_INCLUDE_DIR_GEN};ZLIB;libraries;/")
+ set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${ZLIB_INCLUDE_DIR_GEN};ZLIB;headers;/")
+ set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${ZLIB_INCLUDE_DIR_GEN};ZLIB;configinstall;/")
+ endif (WIN32)
endif (ZLIB_FOUND AND ZLIB_USE_EXTERNAL)
if (SZIP_FOUND AND SZIP_USE_EXTERNAL)
- set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${SZIP_INCLUDE_DIR_GEN};SZIP;libraries;/")
- set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${SZIP_INCLUDE_DIR_GEN};SZIP;headers;/")
- set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${SZIP_INCLUDE_DIR_GEN};SZIP;configinstall;/")
+ if (WIN32)
+ set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${SZIP_INCLUDE_DIR_GEN};SZIP;ALL;/")
+ else (WIN32)
+ set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${SZIP_INCLUDE_DIR_GEN};SZIP;libraries;/")
+ set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${SZIP_INCLUDE_DIR_GEN};SZIP;headers;/")
+ set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${SZIP_INCLUDE_DIR_GEN};SZIP;configinstall;/")
+ endif (WIN32)
endif (SZIP_FOUND AND SZIP_USE_EXTERNAL)
endif (HDF4_ALLOW_EXTERNAL_SUPPORT MATCHES "SVN" OR HDF4_ALLOW_EXTERNAL_SUPPORT MATCHES "TGZ")
endif (HDF4_PACKAGE_EXTLIBS)
include (CPack)
+ cpack_add_install_type(Full DISPLAY_NAME "Everything")
+ cpack_add_install_type(Developer)
+
+ cpack_add_component_group(Runtime)
+
+ cpack_add_component_group(Documents
+ EXPANDED
+ DESCRIPTION "Release notes for developing HDF applications"
+ )
+
+ cpack_add_component_group(Development
+ EXPANDED
+ DESCRIPTION "All of the tools you'll need to develop HDF applications"
+ )
+
+ cpack_add_component_group(Applications
+ EXPANDED
+ DESCRIPTION "Tools for HDF4 files"
+ )
+
#-----------------------------------------------------------------------------
# Now list the cpack commands
#-----------------------------------------------------------------------------
- cpack_add_component (applications
- DISPLAY_NAME "HDF4 Applications"
- DEPENDS libraries
- GROUP Applications
- )
- cpack_add_component (libraries
+ cpack_add_component (libraries
DISPLAY_NAME "HDF4 Libraries"
+ REQUIRED
GROUP Runtime
+ INSTALL_TYPES Full Developer User
)
- cpack_add_component (headers
- DISPLAY_NAME "HDF4 Headers"
+ cpack_add_component (headers
+ DISPLAY_NAME "HDF4 Headers"
DEPENDS libraries
GROUP Development
+ INSTALL_TYPES Full Developer
)
- cpack_add_component (hdfdocuments
+ cpack_add_component (hdfdocuments
DISPLAY_NAME "HDF4 Documents"
GROUP Documents
+ INSTALL_TYPES Full Developer
)
- cpack_add_component (configinstall
- DISPLAY_NAME "HDF4 CMake files"
+ cpack_add_component (configinstall
+ DISPLAY_NAME "HDF4 CMake files"
+ HIDDEN
DEPENDS libraries
GROUP Development
+ INSTALL_TYPES Full Developer User
)
if (HDF4_BUILD_FORTRAN)
- cpack_add_component (fortlibraries
- DISPLAY_NAME "HDF4 Fortran Libraries"
+ cpack_add_component (fortlibraries
+ DISPLAY_NAME "HDF4 Fortran Libraries"
DEPENDS libraries
GROUP Runtime
+ INSTALL_TYPES Full Developer User
)
endif (HDF4_BUILD_FORTRAN)
if (HDF4_BUILD_TOOLS)
- cpack_add_component (toolsapplications
- DISPLAY_NAME "HDF4 Tools Applications"
+ cpack_add_component (toolsapplications
+ DISPLAY_NAME "HDF4 Tools Applications"
DEPENDS libraries
GROUP Applications
- )
- cpack_add_component (toolsheaders
- DISPLAY_NAME "HDF4 Tools Headers"
- DEPENDS libraries
- GROUP Development
+ INSTALL_TYPES Full Developer User
)
endif (HDF4_BUILD_TOOLS)
if (HDF4_BUILD_UTILS)
- cpack_add_component (utilsapplications
- DISPLAY_NAME "HDF4 Utility Applications"
+ cpack_add_component (utilsapplications
+ DISPLAY_NAME "HDF4 Utility Applications"
DEPENDS libraries
GROUP Applications
- )
- cpack_add_component (utilsheaders
- DISPLAY_NAME "HDF4 Utility Headers"
- DEPENDS libraries
- GROUP Development
+ INSTALL_TYPES Full Developer User
)
endif (HDF4_BUILD_UTILS)
endif (NOT HDF4_EXTERNALLY_CONFIGURED AND NOT HDF4_NO_PACKAGES)
-
\ No newline at end of file
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 3209dce..63312d9 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -1,19 +1,37 @@
-cmake_minimum_required (VERSION 2.8.11)
+cmake_minimum_required (VERSION 3.1.0)
PROJECT (HDF4 C CXX)
#-----------------------------------------------------------------------------
# Instructions for use : Normal Build
#
-# For standard build of HDF4 libraries,tests and tools.
+# For standard build of HDF4 libraries,tests and tools.
# Run cmake using the HDF4 source tree to generate a build tree.
# Enable/Disable options according to requirements and
# set CMAKE_INSTALL_PREFIX to the required install path.
# Make install can be used to install all components for system-wide use.
#
+if ("${CMAKE_CURRENT_SOURCE_DIR}" STREQUAL "${CMAKE_CURRENT_BINARY_DIR}")
+ MESSAGE(FATAL_ERROR "\nERROR! ${PROJECT_NAME} DOES NOT SUPPORT IN SOURCE BUILDS!\n"
+ "CMAKE_CURRENT_SOURCE_DIR=${CMAKE_CURRENT_SOURCE_DIR}"
+ " == CMAKE_CURRENT_BINARY_DIR=${CMAKE_CURRENT_BINARY_DIR}\n"
+ "NEXT STEPS:\n"
+ "(1) Delete the CMakeCache.txt file and the CMakeFiles/ directory\n"
+ " under the source directory for ${PROJECT_NAME}, otherwise you\n"
+ " will not be able to configure ${PROJECT_NAME} correctly!\n"
+ " * For example, on linux machines do:\n"
+ " $ rm -r CMakeCache.txt CMakeFiles/\n"
+ "(2) Create a different directory and configure ${PROJECT_NAME} in that directory.\n"
+ " * For example, on linux machines do:\n"
+ " $ mkdir MY_BUILD\n"
+ " $ cd MY_BUILD\n"
+ " $ cmake [OPTIONS] ..\n"
+ )
+endif ()
+
#-----------------------------------------------------------------------------
# Instructions for use : Sub-Project Build
#
-# To include HDF4 as a sub-project within another project.
+# To include HDF4 as a sub-project within another project.
# Set HDF4_EXTERNALLY_CONFIGURED to 1 in the parent project and
# supply values for the following variables...
#
@@ -29,10 +47,16 @@ PROJECT (HDF4 C CXX)
# dependencies to this variable so that external projects pick them up
#
# HDF4_EXTERNAL_LIB_PREFIX :
-# If the parent project needs to install hdf libraries, but avoid
+# If the parent project needs to install hdf libraries, but avoid
# name conflicts with system versions, then a prefix may be added
# to ensure that the correct versions configured are used.
#
+# HDF4_INSTALL_BIN_DIR, HDF4_INSTALL_LIB_DIR, HDF4_INSTALL_INCLUDE_DIR, HDF4_INSTALL_DATA_DIR :
+# Customize the 'bin', 'lib', 'include', and 'share' installation directories.
+#
+# HDF4_INSTALL_NO_DEVELOPMENT :
+# Set to true to skip installation of headers and CMake package files.
+#
# Consider this example, it builds its own zlib
# library and tells HDF4 to add it as a dependency - this ensures that
# any project making use of this build of HDF4 will use the correct zlib
@@ -52,8 +76,8 @@ PROJECT (HDF4 C CXX)
# # Setup all necessary overrides for zlib so that HDF4 uses our
# # internally compiled zlib rather than any other version
# if (HDF4_ENABLE_Z_LIB_SUPPORT)
-# # We must tell the main HDF4 library that it depends on our zlib
-# set (HDF4_LIB_DEPENDENCIES prjzlib)
+# # We must tell the main HDF4 library that it depends on our zlib
+# set (HDF4_LIB_DEPENDENCIES prjzlib)
# # Override the zlib header file
# if (PRJ_USE_SYSTEM_ZLIB)
# set (H4_ZLIB_HEADER "zlib.h")
@@ -64,17 +88,18 @@ PROJECT (HDF4 C CXX)
# set (ZLIB_LIBRARIES prjzlib)
# endif (PRJ_USE_SYSTEM_ZLIB)
# endif (HDF4_ENABLE_Z_LIB_SUPPORT)
-#
+#
# # Add the sub project
# add_subdirectory (Utilities/hdf4.2.5)
# # Add the HDF4 dirs to our include path
-# set (HDF4_INCLUDE_DIR
+# set (HDF4_INCLUDE_DIR
# ${PROJECT_SOURCE_DIR}/Utilities/hdf4.2.5/hdf/src
# ${PROJECT_BINARY_DIR}/Utilities/hdf4.2.5
# ${PROJECT_SOURCE_DIR}/Utilities/hdf4.2.5/mfhdf/libsrc
# )
#
#-----------------------------------------------------------------------------
+string(TIMESTAMP CONFIG_DATE "%Y-%m-%d")
#-----------------------------------------------------------------------------
# Allow Visual Studio solution directories
@@ -94,7 +119,6 @@ mark_as_advanced (HDF4_NO_PACKAGES)
#-----------------------------------------------------------------------------
# Set the core names of all the libraries
#-----------------------------------------------------------------------------
-set (HDF4_LIB_CORENAME "hdf4")
set (HDF4_SRC_LIB_CORENAME "hdf")
set (HDF4_SRC_FCSTUB_LIB_CORENAME "hdf_fcstub")
set (HDF4_SRC_FORTRAN_LIB_CORENAME "hdf_fortran")
@@ -106,11 +130,13 @@ set (HDF4_MF_FCSTUB_LIB_CORENAME "mfhdf_fcstub")
set (HDF4_MF_FORTRAN_LIB_CORENAME "mfhdf_fortran")
set (HDF4_MF_TEST_LIB_CORENAME "mf_test")
set (HDF4_TOOLS_LIB_CORENAME "h4tools")
+set (HDF4_JAVA_JNI_LIB_CORENAME "hdf_java")
+set (HDF4_JAVA_HDF_LIB_CORENAME "jarhdf")
+set (HDF4_JAVA_TEST_LIB_CORENAME "jartest4")
#-----------------------------------------------------------------------------
# Set the true names of all the libraries if customized by external project
#-----------------------------------------------------------------------------
-set (HDF4_LIB_NAME "${HDF4_EXTERNAL_LIB_PREFIX}${HDF4_LIB_CORENAME}")
set (HDF4_SRC_LIB_NAME "${HDF4_EXTERNAL_LIB_PREFIX}${HDF4_SRC_LIB_CORENAME}")
set (HDF4_SRC_FCSTUB_LIB_NAME "${HDF4_EXTERNAL_LIB_PREFIX}${HDF4_SRC_FCSTUB_LIB_CORENAME}")
set (HDF4_SRC_FORTRAN_LIB_NAME "${HDF4_EXTERNAL_LIB_PREFIX}${HDF4_SRC_FORTRAN_LIB_CORENAME}")
@@ -122,22 +148,38 @@ set (HDF4_MF_FCSTUB_LIB_NAME "${HDF4_EXTERNAL_LIB_PREFIX}${HDF4_MF_FCSTUB_LIB
set (HDF4_MF_FORTRAN_LIB_NAME "${HDF4_EXTERNAL_LIB_PREFIX}${HDF4_MF_FORTRAN_LIB_CORENAME}")
set (HDF4_MF_TEST_LIB_NAME "${HDF4_EXTERNAL_LIB_PREFIX}${HDF4_MF_TEST_LIB_CORENAME}")
set (HDF4_TOOLS_LIB_NAME "${HDF4_EXTERNAL_LIB_PREFIX}${HDF4_TOOLS_LIB_CORENAME}")
+set (HDF4_JAVA_JNI_LIB_NAME "${HDF4_EXTERNAL_LIB_PREFIX}${HDF4_JAVA_JNI_LIB_CORENAME}")
+set (HDF4_JAVA_HDF_LIB_NAME "${HDF4_EXTERNAL_LIB_PREFIX}${HDF4_JAVA_HDF_LIB_CORENAME}")
+set (HDF4_JAVA_TEST_LIB_NAME "${HDF4_EXTERNAL_LIB_PREFIX}${HDF4_JAVA_TEST_LIB_CORENAME}")
#-----------------------------------------------------------------------------
# Set the target names of all the libraries
#-----------------------------------------------------------------------------
-set (HDF4_LIB_TARGET ${HDF4_LIB_CORENAME})
-set (HDF4_SRC_LIB_TARGET ${HDF4_SRC_LIB_CORENAME})
-set (HDF4_SRC_FCSTUB_LIB_TARGET ${HDF4_SRC_FCSTUB_LIB_CORENAME})
-set (HDF4_SRC_FORTRAN_LIB_TARGET ${HDF4_SRC_FORTRAN_LIB_CORENAME})
-set (HDF4_MF_LIB_TARGET ${HDF4_MF_LIB_CORENAME})
-set (HDF4_MF_XDR_LIB_TARGET ${HDF4_MF_XDR_LIB_CORENAME})
-set (HDF4_HDF_TEST_LIB_TARGET ${HDF4_HDF_TEST_LIB_CORENAME})
-set (HDF4_HDF_TEST_FCSTUB_LIB_TARGET ${HDF4_HDF_TEST_FCSTUB_LIB_CORENAME})
-set (HDF4_MF_FCSTUB_LIB_TARGET ${HDF4_MF_FCSTUB_LIB_CORENAME})
-set (HDF4_MF_FORTRAN_LIB_TARGET ${HDF4_MF_FORTRAN_LIB_CORENAME})
-set (HDF4_MF_TEST_LIB_TARGET ${HDF4_MF_TEST_LIB_CORENAME})
-set (HDF4_TOOLS_LIB_TARGET ${HDF4_TOOLS_LIB_CORENAME})
+set (HDF4_SRC_LIB_TARGET "${HDF4_SRC_LIB_CORENAME}-static")
+set (HDF4_SRC_FCSTUB_LIB_TARGET "${HDF4_SRC_FCSTUB_LIB_CORENAME}-static")
+set (HDF4_SRC_FORTRAN_LIB_TARGET "${HDF4_SRC_FORTRAN_LIB_CORENAME}-static")
+set (HDF4_MF_LIB_TARGET "${HDF4_MF_LIB_CORENAME}-static")
+set (HDF4_MF_XDR_LIB_TARGET "${HDF4_MF_XDR_LIB_CORENAME}-static")
+set (HDF4_HDF_TEST_LIB_TARGET "${HDF4_HDF_TEST_LIB_CORENAME}-static")
+set (HDF4_HDF_TEST_FCSTUB_LIB_TARGET "${HDF4_HDF_TEST_FCSTUB_LIB_CORENAME}-static")
+set (HDF4_MF_FCSTUB_LIB_TARGET "${HDF4_MF_FCSTUB_LIB_CORENAME}-static")
+set (HDF4_MF_FORTRAN_LIB_TARGET "${HDF4_MF_FORTRAN_LIB_CORENAME}-static")
+set (HDF4_MF_TEST_LIB_TARGET "${HDF4_MF_TEST_LIB_CORENAME}-static")
+set (HDF4_TOOLS_LIB_TARGET "${HDF4_TOOLS_LIB_CORENAME}-static")
+set (HDF4_JAVA_JNI_LIB_TARGET "${HDF4_JAVA_JNI_LIB_CORENAME}")
+set (HDF4_JAVA_HDF_LIB_TARGET "${HDF4_JAVA_HDF_LIB_CORENAME}")
+set (HDF4_JAVA_TEST_LIB_TARGET "${HDF4_JAVA_TEST_LIB_CORENAME}")
+set (HDF4_SRC_LIBSH_TARGET "${HDF4_SRC_LIB_CORENAME}-shared")
+set (HDF4_SRC_FCSTUB_LIBSH_TARGET "${HDF4_SRC_FCSTUB_LIB_CORENAME}-shared")
+set (HDF4_SRC_FORTRAN_LIBSH_TARGET "${HDF4_SRC_FORTRAN_LIB_CORENAME}-shared")
+set (HDF4_MF_LIBSH_TARGET "${HDF4_MF_LIB_CORENAME}-shared")
+set (HDF4_MF_XDR_LIBSH_TARGET "${HDF4_MF_XDR_LIB_CORENAME}-shared")
+set (HDF4_HDF_TEST_LIBSH_TARGET "${HDF4_HDF_TEST_LIB_CORENAME}-shared")
+set (HDF4_HDF_TEST_FCSTUB_LIBSH_TARGET "${HDF4_HDF_TEST_FCSTUB_LIB_CORENAME}-shared")
+set (HDF4_MF_FCSTUB_LIBSH_TARGET "${HDF4_MF_FCSTUB_LIB_CORENAME}-shared")
+set (HDF4_MF_FORTRAN_LIBSH_TARGET "${HDF4_MF_FORTRAN_LIB_CORENAME}-shared")
+set (HDF4_MF_TEST_LIBSH_TARGET "${HDF4_MF_TEST_LIB_CORENAME}-shared")
+set (HDF4_TOOLS_LIBSH_TARGET "${HDF4_TOOLS_LIB_CORENAME}-shared")
#-----------------------------------------------------------------------------
# Define some CMake variables for use later in the project
@@ -150,13 +192,34 @@ set (HDF4_MFHDFSOURCE_DIR ${HDF4_SOURCE_DIR}/mfhdf/libsrc)
set (HDF4_MFHDF_TEST_DIR ${HDF4_SOURCE_DIR}/mfhdf/test)
set (HDF4_MFHDF_FORTRAN_DIR ${HDF4_SOURCE_DIR}/mfhdf/fortran)
set (HDF4_MFHDF_XDR_DIR ${HDF4_SOURCE_DIR}/mfhdf/xdr)
+set (HDF4_JAVA_JNI_SRC_DIR ${HDF4_SOURCE_DIR}/java/src/jni)
+set (HDF4_JAVA_HDF_SRC_DIR ${HDF4_SOURCE_DIR}/java/src/hdf)
+set (HDF4_JAVA_TEST_SRC_DIR ${HDF4_SOURCE_DIR}/java/test)
+set (HDF4_JAVA_LIB_DIR ${HDF4_SOURCE_DIR}/java/lib)
+set (HDF4_JAVA_LOGGING_JAR ${HDF4_SOURCE_DIR}/java/lib/slf4j-api-1.7.5.jar)
+set (HDF4_JAVA_LOGGING_NOP_JAR ${HDF4_SOURCE_DIR}/java/lib/ext/slf4j-nop-1.7.5.jar)
+set (HDF4_JAVA_LOGGING_SIMPLE_JAR ${HDF4_SOURCE_DIR}/java/lib/ext/slf4j-simple-1.7.5.jar)
+
+if (APPLE)
+ option (HDF4_BUILD_FRAMEWORKS "TRUE to build as frameworks libraries, FALSE to build according to BUILD_SHARED_LIBS" FALSE)
+endif (APPLE)
if (NOT HDF4_INSTALL_BIN_DIR)
set (HDF4_INSTALL_BIN_DIR bin)
- set (HDF4_INSTALL_UTILS_BIN_DIR ${HDF4_INSTALL_BIN_DIR}) #${HDF4_INSTALL_BIN_DIR}/utils
- set (HDF4_INSTALL_TOOLS_BIN_DIR ${HDF4_INSTALL_BIN_DIR}) #${HDF4_INSTALL_BIN_DIR}/tools
+ set (HDF4_INSTALL_UTILS_BIN_DIR ${HDF4_INSTALL_BIN_DIR})
+ set (HDF4_INSTALL_TOOLS_BIN_DIR ${HDF4_INSTALL_BIN_DIR})
endif (NOT HDF4_INSTALL_BIN_DIR)
if (NOT HDF4_INSTALL_LIB_DIR)
+ if (APPLE)
+ if (HDF4_BUILD_FRAMEWORKS)
+ set (HDF4_INSTALL_JAR_DIR ../Java)
+ else (HDF4_BUILD_FRAMEWORKS)
+ set (HDF4_INSTALL_JAR_DIR lib)
+ endif (HDF4_BUILD_FRAMEWORKS)
+ set (HDF4_INSTALL_FMWK_DIR ${CMAKE_INSTALL_FRAMEWORK_PREFIX})
+ else (APPLE)
+ set (HDF4_INSTALL_JAR_DIR lib)
+ endif (APPLE)
set (HDF4_INSTALL_LIB_DIR lib)
endif (NOT HDF4_INSTALL_LIB_DIR)
if (NOT HDF4_INSTALL_INCLUDE_DIR)
@@ -164,6 +227,14 @@ if (NOT HDF4_INSTALL_INCLUDE_DIR)
endif (NOT HDF4_INSTALL_INCLUDE_DIR)
if (NOT HDF4_INSTALL_DATA_DIR)
if (NOT WIN32)
+ if (APPLE)
+ if (HDF4_BUILD_FRAMEWORKS)
+ set (HDF4_INSTALL_EXTRA_DIR ../SharedSupport)
+ else (HDF4_BUILD_FRAMEWORKS)
+ set (HDF4_INSTALL_EXTRA_DIR share)
+ endif (HDF4_BUILD_FRAMEWORKS)
+ set (HDF4_INSTALL_FWRK_DIR ${CMAKE_INSTALL_FRAMEWORK_PREFIX})
+ endif (APPLE)
set (HDF4_INSTALL_DATA_DIR share)
set (HDF4_INSTALL_CMAKE_DIR share/cmake)
else (NOT WIN32)
@@ -172,6 +243,10 @@ if (NOT HDF4_INSTALL_DATA_DIR)
endif (NOT WIN32)
endif (NOT HDF4_INSTALL_DATA_DIR)
+if(DEFINED ADDITIONAL_CMAKE_PREFIX_PATH AND EXISTS "${ADDITIONAL_CMAKE_PREFIX_PATH}")
+ set (CMAKE_PREFIX_PATH ${ADDITIONAL_CMAKE_PREFIX_PATH} ${CMAKE_PREFIX_PATH})
+endif(DEFINED ADDITIONAL_CMAKE_PREFIX_PATH AND EXISTS "${ADDITIONAL_CMAKE_PREFIX_PATH}")
+
#-----------------------------------------------------------------------------
# parse the full version number from hfile.h and include in H4_VERS_INFO
#-----------------------------------------------------------------------------
@@ -226,6 +301,11 @@ if (NOT HDF4_EXTERNALLY_CONFIGURED)
set (CMAKE_Fortran_MODULE_DIRECTORY
${PROJECT_BINARY_DIR}/bin/fortran CACHE PATH "Single Directory for all fortran modules."
)
+ if (WIN32)
+ set (CMAKE_TEST_OUTPUT_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${CMAKE_BUILD_TYPE})
+ else (WIN32)
+ set (CMAKE_TEST_OUTPUT_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY})
+ endif (WIN32)
else (NOT HDF4_EXTERNALLY_CONFIGURED)
# if we are externally configured, but the project uses old cmake scripts
# this may not be set and utilities like Hdetect will fail
@@ -260,21 +340,23 @@ include (${HDF_RESOURCES_DIR}/ConfigureChecks.cmake)
set (CMAKE_INCLUDE_CURRENT_DIR_IN_INTERFACE ON)
#-----------------------------------------------------------------------------
-# Option to Build Shared/Static libs, default is static
+# Mac OS X Options
+#-----------------------------------------------------------------------------
+if (HDF4_BUILD_FRAMEWORKS AND NOT BUILD_SHARED_LIBS)
+ set (BUILD_SHARED_LIBS ON CACHE BOOL "Build Shared Libraries")
+endif (HDF4_BUILD_FRAMEWORKS AND NOT BUILD_SHARED_LIBS)
+
+#-----------------------------------------------------------------------------
+# Option to Build Shared and Static libs, default is static
#-----------------------------------------------------------------------------
option (BUILD_SHARED_LIBS "Build Shared Libraries" OFF)
-set (LIB_TYPE STATIC)
set (H4_ENABLE_SHARED_LIB NO)
-set (H4_ENABLE_STATIC_LIB NO)
if (BUILD_SHARED_LIBS)
- set (LIB_TYPE SHARED)
- set (H4_BUILT_AS_DYNAMIC_LIB 1)
set (H4_ENABLE_SHARED_LIB YES)
-else (BUILD_SHARED_LIBS)
- set (H4_BUILT_AS_STATIC_LIB 1)
- set (H4_ENABLE_STATIC_LIB YES)
- set (CMAKE_POSITION_INDEPENDENT_CODE ON)
+ set (LINK_SHARED_LIBS ${LINK_LIBS})
endif (BUILD_SHARED_LIBS)
+set (H4_ENABLE_STATIC_LIB YES)
+set (CMAKE_POSITION_INDEPENDENT_CODE ON)
add_definitions (-DBIG_LONGS -DSWAP)
add_definitions (-DHAVE_CONFIG_H)
@@ -301,7 +383,7 @@ option (HDF4_ENABLE_COVERAGE "Enable code coverage for Libraries and Programs" O
if (HDF4_ENABLE_COVERAGE)
set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -g -O0 -fprofile-arcs -ftest-coverage")
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -g -O0 -fprofile-arcs -ftest-coverage")
- set (LDFLAGS "${LDFLAGS} -fprofile-arcs -ftest-coverage")
+ set (LDFLAGS "${LDFLAGS} -fprofile-arcs -ftest-coverage")
endif (HDF4_ENABLE_COVERAGE)
#-----------------------------------------------------------------------------
@@ -309,18 +391,18 @@ endif (HDF4_ENABLE_COVERAGE)
#-----------------------------------------------------------------------------
option (HDF4_ENABLE_DEPRECATED_SYMBOLS "Enable deprecated public API symbols" ON)
if (HDF4_ENABLE_DEPRECATED_SYMBOLS)
- set (H4_NO_DEPRECATED_SYMBOLS 0)
+ set (H4_NO_DEPRECATED_SYMBOLS 0)
else (HDF4_ENABLE_DEPRECATED_SYMBOLS)
- set (H4_NO_DEPRECATED_SYMBOLS 1)
+ set (H4_NO_DEPRECATED_SYMBOLS 1)
endif (HDF4_ENABLE_DEPRECATED_SYMBOLS)
#-----------------------------------------------------------------------------
# Include the main src and config directories
#-----------------------------------------------------------------------------
set (HDF4_INCLUDE_DIRECTORIES
- ${HDF4_HDFSOURCE_DIR}
- ${HDF4_MFHDFSOURCE_DIR}
- ${HDF4_SOURCE_DIR}
+ ${HDF4_HDFSOURCE_DIR}
+ ${HDF4_MFHDFSOURCE_DIR}
+ ${HDF4_SOURCE_DIR}
${HDF4_BINARY_DIR}
${CMAKE_RUNTIME_OUTPUT_DIRECTORY}
)
@@ -440,7 +522,7 @@ if (HDF4_ENABLE_PARALLEL)
# MPI checks, only do these if MPI_C_FOUND is true, otherwise they always fail
# and once set, they are cached as false and not regenerated
set (CMAKE_REQUIRED_LIBRARIES "${MPI_C_LIBRARIES}" )
- CHECK_FUNCTION_EXISTS (MPI_File_get_size H4_HAVE_MPI_GET_SIZE)
+ CHECK_FUNCTION_EXISTS (MPI_File_get_size H4_HAVE_MPI_GET_SIZE)
# Used by Fortran + MPI
CHECK_SYMBOL_EXISTS (MPI_Comm_c2f "${MPI_C_INCLUDE_PATH}/mpi.h" H4_HAVE_MPI_MULTI_LANG_Comm)
CHECK_SYMBOL_EXISTS (MPI_Info_c2f "${MPI_C_INCLUDE_PATH}/mpi.h" H4_HAVE_MPI_MULTI_LANG_Info)
@@ -462,6 +544,9 @@ endif (H4_HAVE_PARALLEL)
#-----------------------------------------------------------------------------
include (UserMacros.cmake)
+#-----------------------------------------------------------------------------
+# Include filter (zlib, szip, etc.) macros
+#-----------------------------------------------------------------------------
include (CMakeFilters.cmake)
#-----------------------------------------------------------------------------
@@ -478,7 +563,7 @@ if (NOT HDF4_EXTERNALLY_CONFIGURED)
if (HDF4_ENABLE_Z_LIB_SUPPORT AND ZLIB_FOUND)
PACKAGE_ZLIB_LIBRARY (${HDF4_ALLOW_EXTERNAL_SUPPORT})
endif (HDF4_ENABLE_Z_LIB_SUPPORT AND ZLIB_FOUND)
-
+
if (HDF4_ENABLE_SZIP_SUPPORT AND SZIP_FOUND)
PACKAGE_SZIP_LIBRARY (${HDF4_ALLOW_EXTERNAL_SUPPORT})
endif (HDF4_ENABLE_SZIP_SUPPORT AND SZIP_FOUND)
@@ -506,23 +591,35 @@ add_subdirectory (${HDF4_SOURCE_DIR}/mfhdf/libsrc ${PROJECT_BINARY_DIR}/mfhdf/
if (HDF4_ALLOW_EXTERNAL_SUPPORT MATCHES "SVN" OR HDF4_ALLOW_EXTERNAL_SUPPORT MATCHES "TGZ")
if (JPEG_FOUND AND JPEG_USE_EXTERNAL)
add_dependencies (${HDF4_SRC_LIB_TARGET} JPEG)
+ if (BUILD_SHARED_LIBS)
+ add_dependencies (${HDF4_SRC_LIBSH_TARGET} JPEG)
+ endif (BUILD_SHARED_LIBS)
endif (JPEG_FOUND AND JPEG_USE_EXTERNAL)
if (ZLIB_FOUND AND ZLIB_USE_EXTERNAL)
add_dependencies (${HDF4_SRC_LIB_TARGET} ZLIB)
+ if (BUILD_SHARED_LIBS)
+ add_dependencies (${HDF4_SRC_LIBSH_TARGET} ZLIB)
+ endif (BUILD_SHARED_LIBS)
endif (ZLIB_FOUND AND ZLIB_USE_EXTERNAL)
if (SZIP_FOUND AND SZIP_USE_EXTERNAL)
add_dependencies (${HDF4_SRC_LIB_TARGET} SZIP)
+ if (BUILD_SHARED_LIBS)
+ add_dependencies (${HDF4_SRC_LIBSH_TARGET} SZIP)
+ endif (BUILD_SHARED_LIBS)
endif (SZIP_FOUND AND SZIP_USE_EXTERNAL)
endif (HDF4_ALLOW_EXTERNAL_SUPPORT MATCHES "SVN" OR HDF4_ALLOW_EXTERNAL_SUPPORT MATCHES "TGZ")
if (HDF4_BUILD_XDR_LIB)
add_dependencies (${HDF4_MF_LIB_TARGET} ${HDF4_MF_XDR_LIB_TARGET})
+ if (BUILD_SHARED_LIBS)
+ add_dependencies (${HDF4_MF_LIBSH_TARGET} ${HDF4_MF_XDR_LIBSH_TARGET})
+ endif (BUILD_SHARED_LIBS)
endif (HDF4_BUILD_XDR_LIB)
#-----------------------------------------------------------------------------
# Dashboard and Testing Settings
#-----------------------------------------------------------------------------
-option (BUILD_TESTING "Build HDF4 Unit Testing" OFF)
+option (BUILD_TESTING "Build HDF4 Unit Testing" ON)
if (BUILD_TESTING)
set (DART_TESTING_TIMEOUT 1200
CACHE INTEGER
@@ -530,6 +627,10 @@ if (BUILD_TESTING)
)
enable_testing ()
include (CTest)
+
+ include (${HDF4_SOURCE_DIR}/CTestConfig.cmake)
+ configure_file (${HDF_RESOURCES_DIR}/CTestCustom.cmake ${HDF4_BINARY_DIR}/CTestCustom.ctest @ONLY)
+
if (NOT HDF4_EXTERNALLY_CONFIGURED)
if (EXISTS "${HDF4_SOURCE_DIR}/hdf/test" AND IS_DIRECTORY "${HDF4_SOURCE_DIR}/hdf/test")
add_subdirectory (${HDF4_SOURCE_DIR}/hdf/test ${PROJECT_BINARY_DIR}/hdf/test)
@@ -538,8 +639,6 @@ if (BUILD_TESTING)
add_subdirectory (${HDF4_SOURCE_DIR}/mfhdf/test ${PROJECT_BINARY_DIR}/mfhdf/test)
endif (EXISTS "${HDF4_SOURCE_DIR}/mfhdf/test" AND IS_DIRECTORY "${HDF4_SOURCE_DIR}/mfhdf/test")
endif (NOT HDF4_EXTERNALLY_CONFIGURED)
- include (${HDF4_SOURCE_DIR}/CTestConfig.cmake)
- configure_file (${HDF_RESOURCES_DIR}/CTestCustom.cmake ${HDF4_BINARY_DIR}/CTestCustom.ctest @ONLY)
endif (BUILD_TESTING)
#-----------------------------------------------------------------------------
@@ -596,6 +695,16 @@ if (EXISTS "${HDF4_SOURCE_DIR}/mfhdf/examples" AND IS_DIRECTORY "${HDF4_SOURCE_D
endif (EXISTS "${HDF4_SOURCE_DIR}/mfhdf/examples" AND IS_DIRECTORY "${HDF4_SOURCE_DIR}/mfhdf/examples")
#-----------------------------------------------------------------------------
+# Option to build HDF4 Java Library
+#-----------------------------------------------------------------------------
+if (EXISTS "${HDF4_SOURCE_DIR}/java" AND IS_DIRECTORY "${HDF4_SOURCE_DIR}/java")
+ option (HDF4_BUILD_JAVA "Build Java HDF Library" OFF)
+ if (HDF4_BUILD_JAVA)
+ add_subdirectory (${HDF4_SOURCE_DIR}/java ${PROJECT_BINARY_DIR}/java)
+ endif (HDF4_BUILD_JAVA)
+endif (EXISTS "${HDF4_SOURCE_DIR}/java" AND IS_DIRECTORY "${HDF4_SOURCE_DIR}/java")
+
+#-----------------------------------------------------------------------------
# Generate the H4config.h file containing user settings needed by compilation
#-----------------------------------------------------------------------------
configure_file (${HDF_RESOURCES_DIR}/h4config.h.in ${PROJECT_BINARY_DIR}/h4config.h @ONLY)
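The HDF4_BUILD_JAVA option added above is only offered when the java tree exists and it defaults to OFF, so the JNI wrappers must be enabled explicitly at configure time. A hedged sketch of a cache preload entry in the style of config/cmake/cacheinit.cmake (the entry itself is illustrative, not part of the patch); passing -DHDF4_BUILD_JAVA:BOOL=ON on the cmake command line has the same effect:

    # Preload with: cmake -C my-cache.cmake <source-dir>
    set (HDF4_BUILD_JAVA ON CACHE BOOL "Build Java HDF Library" FORCE)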
diff --git a/COPYING b/COPYING
index 825f1ce..ecd09c0 100644
--- a/COPYING
+++ b/COPYING
@@ -4,7 +4,7 @@ Hierarchical Data Format (HDF) Software Library and Utilities
---------------------------------------------------------------------------
Hierarchical Data Format (HDF) Software Library and Utilities
-Copyright 2006-2015 by The HDF Group.
+Copyright 2006-2016 by The HDF Group.
NCSA Hierarchical Data Format (HDF) Software Library and Utilities
Copyright 1988-2006 by the Board of Trustees of the University of Illinois.
diff --git a/CTestConfig.cmake b/CTestConfig.cmake
index 6bc8702..ae5da48 100644
--- a/CTestConfig.cmake
+++ b/CTestConfig.cmake
@@ -9,10 +9,10 @@ set (CTEST_NIGHTLY_START_TIME "18:00:00 CST")
set (CTEST_DROP_METHOD "http")
if (CDASH_LOCAL)
- set (CTEST_DROP_SITE "72.36.68.252")
+ set (CTEST_DROP_SITE "10.10.10.82")
set (CTEST_DROP_LOCATION "/submit.php?project=HDF4")
else (CDASH_LOCAL)
- set (CTEST_DROP_SITE "cdash.hdfgroup.uiuc.edu")
+ set (CTEST_DROP_SITE "cdash.hdfgroup.org")
set (CTEST_DROP_LOCATION "/submit.php?project=HDF4")
endif (CDASH_LOCAL)
set (CTEST_DROP_SITE_CDASH TRUE)
diff --git a/MANIFEST b/MANIFEST
index da8c4d8..ee3c6bd 100644
--- a/MANIFEST
+++ b/MANIFEST
@@ -31,12 +31,31 @@
./libhdf4.settings.in
./move-if-change
+./m4/ax_check_class.m4
+./m4/ax_check_classpath.m4
+./m4/ax_check_java_home.m4
+./m4/ax_check_junit.m4
+./m4/ax_check_rqrd_class.m4
+./m4/ax_java_check_class.m4
+./m4/ax_java_options.m4
+./m4/ax_jni_include_dir.m4
+./m4/ax_prog_jar.m4
+./m4/ax_prog_java_cc.m4
+./m4/ax_prog_java_works.m4
+./m4/ax_prog_java.m4
+./m4/ax_prog_javac_works.m4
+./m4/ax_prog_javac.m4
+./m4/ax_prog_javadoc.m4
+./m4/ax_prog_javah.m4
+./m4/ax_try_compile_java.m4
+./m4/ax_try_run_java.m4
./m4/ltsugar.m4
./m4/libtool.m4
./m4/ltversion.m4
./m4/lt~obsolete.m4
./m4/ltoptions.m4
+./bin/bbrelease _DO_NOT_DISTRIBUTE_
./bin/chkmanifest
./bin/cmakehdf4
./bin/compile
@@ -44,6 +63,7 @@
./bin/config.sub
./bin/depcomp
./bin/deploy
+./bin/h4_release_check.sh
./bin/install-sh
./bin/locate_sw
./bin/ltmain.sh
@@ -480,6 +500,89 @@
./hdf/util/testfiles/fp2hdf/pal.hdf
./hdf/util/testfiles/fp2hdf/t100x100
+./java/CMakeLists.txt
+./java/Makefile.am
+./java/Makefile.in
+./java/examples/CMakeLists.txt
+./java/examples/Makefile.am
+./java/examples/Makefile.in
+./java/examples/HDF4DatasetCreate.java
+./java/examples/HDF4FileCreate.java
+./java/examples/HDF4GroupCreate.java
+./java/lib/hamcrest-core.jar
+./java/lib/junit.jar
+./java/lib/simplelogger.properties
+./java/lib/slf4j-api-1.7.5.jar
+./java/lib/ext/slf4j-nop-1.7.5.jar
+./java/lib/ext/slf4j-simple-1.7.5.jar
+./java/src/CMakeLists.txt
+./java/src/Makefile.am
+./java/src/Makefile.in
+./java/src/hdf/CMakeLists.txt
+./java/src/hdf/overview.html
+./java/src/hdf/hdflib/CMakeLists.txt
+./java/src/hdf/hdflib/HDFArray.java
+./java/src/hdf/hdflib/HDFChunkInfo.java
+./java/src/hdf/hdflib/HDFCompInfo.java
+./java/src/hdf/hdflib/HDFConstants.java
+./java/src/hdf/hdflib/HDFDeflateCompInfo.java
+./java/src/hdf/hdflib/HDFException.java
+./java/src/hdf/hdflib/HDFIMCOMPCompInfo.java
+./java/src/hdf/hdflib/HDFJavaException.java
+./java/src/hdf/hdflib/HDFJPEGCompInfo.java
+./java/src/hdf/hdflib/HDFLibrary.java
+./java/src/hdf/hdflib/HDFLibraryException.java
+./java/src/hdf/hdflib/HDFNativeData.java
+./java/src/hdf/hdflib/HDFNBITChunkInfo.java
+./java/src/hdf/hdflib/HDFNBITCompInfo.java
+./java/src/hdf/hdflib/HDFNewCompInfo.java
+./java/src/hdf/hdflib/HDFNotImplementedException.java
+./java/src/hdf/hdflib/HDFOldCompInfo.java
+./java/src/hdf/hdflib/HDFOldRLECompInfo.java
+./java/src/hdf/hdflib/HDFRLECompInfo.java
+./java/src/hdf/hdflib/HDFSKPHUFFCompInfo.java
+./java/src/hdf/hdflib/HDFSZIPCompInfo.java
+./java/src/jni/CMakeLists.txt
+./java/src/jni/Makefile.am
+./java/src/jni/Makefile.in
+./java/src/jni/h4jni.h
+./java/src/jni/hdfanImp.c
+./java/src/jni/hdfdfpalImp.c
+./java/src/jni/hdfdfuImp.c
+./java/src/jni/hdfexceptionImp.c
+./java/src/jni/hdfgrImp.c
+./java/src/jni/hdfheImp.c
+./java/src/jni/hdfhxImp.c
+./java/src/jni/hdfImp.c
+./java/src/jni/hdfnativeImp.c
+./java/src/jni/hdfr24Imp.c
+./java/src/jni/hdfr8Imp.c
+./java/src/jni/hdfsdsImp.c
+./java/src/jni/hdfstructsutil.c
+./java/src/jni/hdfvdataImp.c
+./java/src/jni/hdfvfImp.c
+./java/src/jni/hdfvgroupImp.c
+./java/src/jni/hdfvhImp.c
+./java/src/jni/hdfvqImp.c
+./java/src/jni/hdfvsqImp.c
+./java/test/CMakeLists.txt
+./java/test/Makefile.am
+./java/test/Makefile.in
+./java/test/junit.sh.in
+./java/test/JUnit-interface.ert
+./java/test/JUnit-interface.txt
+./java/test/TestAll.java
+./java/test/TestH4.java
+./java/test/TestH4ANparams.java
+./java/test/TestH4DFparams.java
+./java/test/TestH4DFPparams.java
+./java/test/TestH4DFRparams.java
+./java/test/TestH4GRparams.java
+./java/test/TestH4HCparams.java
+./java/test/TestH4SDparams.java
+./java/test/TestH4Vparams.java
+./java/test/TestH4VSparams.java
+
./man/Makefile.am
./man/Makefile.in
./man/gr_chunk.3
@@ -518,6 +621,7 @@
./mfhdf/dumper/testfiles/IMCOMP.hdf
./mfhdf/dumper/testfiles/LongDataset.hdf
./mfhdf/dumper/testfiles/Roy.nc
+./mfhdf/dumper/testfiles/Roy-64.nc
./mfhdf/dumper/testfiles/SDSlongname.hdf
./mfhdf/dumper/testfiles/Tables.hdf
./mfhdf/dumper/testfiles/Tables_External_File
@@ -568,6 +672,7 @@
./mfhdf/dumper/testfiles/dumpsds-16.out
./mfhdf/dumper/testfiles/dumpsds-17.out
./mfhdf/dumper/testfiles/dumpsds-18.out
+./mfhdf/dumper/testfiles/dumpsds-19.out
./mfhdf/dumper/testfiles/dumpvd-10.out
./mfhdf/dumper/testfiles/dumpvd-11.out
./mfhdf/dumper/testfiles/dumpvd-12.out
@@ -931,6 +1036,8 @@
./mfhdf/test/hdftest.h
./mfhdf/test/sds_szipped.dat
./mfhdf/test/smallslice.0000.nc
+./mfhdf/test/Roy.nc
+./mfhdf/test/Roy-64.nc
./mfhdf/test/tattdatainfo.c
./mfhdf/test/tattributes.c
./mfhdf/test/tchunk.c
@@ -940,6 +1047,7 @@
./mfhdf/test/tdatasizes.c
./mfhdf/test/tdim.c
./mfhdf/test/temptySDSs.c
+./mfhdf/test/texternal.c
./mfhdf/test/test1.nc
./mfhdf/test/testout.sav
./mfhdf/test/testmfhdf.sh.in
@@ -1031,24 +1139,29 @@
./config/cmake/cacheinit.cmake
./config/cmake/ConfigureChecks.cmake
+./config/cmake/CPack.Info.plist.in
./config/cmake/CTestCustom.cmake
./config/cmake/FindHDF4.cmake.in
+./config/cmake/FindJNI.cmake
./config/cmake/h4config.h.in
./config/cmake/HDF4_Examples.cmake.in
./config/cmake/hdf4-config-version.cmake.in
-./config/cmake/hdf4-config.cmake.build.in
-./config/cmake/hdf4-config.cmake.install.in
+./config/cmake/hdf4-config.cmake.in
./config/cmake/HDF4Macros.cmake
./config/cmake/HDF4UseFortran.cmake
+./config/cmake/jrunTest.cmake
./config/cmake/libhdf4.settings.cmake.in
./config/cmake/mccacheinit.cmake
./config/cmake/NSIS.InstallOptions.ini.in
+./config/cmake/patch.xml
./config/cmake/PkgInfo.in
./config/cmake/README.txt.cmake.in
+./config/cmake/UseJava.cmake
+./config/cmake/UseJavaClassFilelist.cmake
+./config/cmake/UseJavaSymlinks.cmake
./config/cmake_ext_mod/CheckTypeSize.cmake
./config/cmake_ext_mod/ConfigureChecks.cmake
-./config/cmake_ext_mod/CPack.Info.plist.in
./config/cmake_ext_mod/CTestCustom.cmake
./config/cmake_ext_mod/FindMPI.cmake _DO_NOT_DISTRIBUTE_
./config/cmake_ext_mod/FindSZIP.cmake
diff --git a/Makefile.am b/Makefile.am
index e146a28..65f7a0f 100644
--- a/Makefile.am
+++ b/Makefile.am
@@ -4,7 +4,13 @@
include $(top_srcdir)/config/commence.am
-SUBDIRS = hdf mfhdf man
+if BUILD_JAVA_CONDITIONAL
+ JAVA_DIR=java
+else
+ JAVA_DIR=
+endif
+
+SUBDIRS = hdf mfhdf $(JAVA_DIR) man
# install libhdf4.settings in lib directory
settingsdir = $(libdir)
settings_DATA = libhdf4.settings
diff --git a/Makefile.in b/Makefile.in
index e9a80c1..67dc12b 100644
--- a/Makefile.in
+++ b/Makefile.in
@@ -92,7 +92,20 @@ DIST_COMMON = $(top_srcdir)/config/commence.am $(srcdir)/Makefile.in \
$(top_srcdir)/bin/ltmain.sh $(top_srcdir)/bin/missing
subdir = .
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
-am__aclocal_m4_deps = $(top_srcdir)/configure.ac
+am__aclocal_m4_deps = $(top_srcdir)/m4/ax_check_class.m4 \
+ $(top_srcdir)/m4/ax_check_classpath.m4 \
+ $(top_srcdir)/m4/ax_check_java_home.m4 \
+ $(top_srcdir)/m4/ax_check_junit.m4 \
+ $(top_srcdir)/m4/ax_java_options.m4 \
+ $(top_srcdir)/m4/ax_jni_include_dir.m4 \
+ $(top_srcdir)/m4/ax_prog_jar.m4 \
+ $(top_srcdir)/m4/ax_prog_java.m4 \
+ $(top_srcdir)/m4/ax_prog_java_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javac.m4 \
+ $(top_srcdir)/m4/ax_prog_javac_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javadoc.m4 \
+ $(top_srcdir)/m4/ax_try_compile_java.m4 \
+ $(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
am__CONFIG_DISTCLEAN_FILES = config.status config.cache config.log \
@@ -185,7 +198,7 @@ am__define_uniq_tagged_files = \
ETAGS = etags
CTAGS = ctags
CSCOPE = cscope
-DIST_SUBDIRS = $(SUBDIRS)
+DIST_SUBDIRS = hdf mfhdf java man
DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
distdir = $(PACKAGE)-$(VERSION)
top_distdir = $(distdir)
@@ -270,12 +283,27 @@ FFLAGS = @FFLAGS@
FGREP = @FGREP@
FLIBS = @FLIBS@
GREP = @GREP@
+H4_CLASSPATH = @H4_CLASSPATH@
+H4_JAVACFLAGS = @H4_JAVACFLAGS@
+H4_JAVAFLAGS = @H4_JAVAFLAGS@
H4_VERSION = @H4_VERSION@
+HDF_JAVA = @HDF_JAVA@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
INSTALL_PROGRAM = @INSTALL_PROGRAM@
INSTALL_SCRIPT = @INSTALL_SCRIPT@
INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+JAR = @JAR@
+JAVA = @JAVA@
+JAVAC = @JAVAC@
+JAVACFLAGS = @JAVACFLAGS@
+JAVADOC = @JAVADOC@
+JAVAFLAGS = @JAVAFLAGS@
+JAVAPREFIX = @JAVAPREFIX@
+JAVA_JUNIT = @JAVA_JUNIT@
+JAVA_PATH_NAME = @JAVA_PATH_NAME@
+JNIFLAGS = @JNIFLAGS@
+JUNIT = @JUNIT@
LD = @LD@
LDFLAGS = @LDFLAGS@
LEX = @LEX@
@@ -318,11 +346,14 @@ STRIP = @STRIP@
SZIP_HAS_ENCODER = @SZIP_HAS_ENCODER@
SZIP_INFO = @SZIP_INFO@
TBL = @TBL@
+TESTS_JUNIT = @TESTS_JUNIT@
TEST_FORTRAN_NETCDF = @TEST_FORTRAN_NETCDF@
UNAME_INFO = @UNAME_INFO@
USE_COMP_SZIP = @USE_COMP_SZIP@
+UUDECODE = @UUDECODE@
VERSION = @VERSION@
YACC = @YACC@
+_ACJNI_JAVAC = @_ACJNI_JAVAC@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
@@ -396,7 +427,9 @@ ACLOCAL_AMFLAGS = "-I m4"
# .chkexe files are used to mark tests that have run successfully.
# .chklog files are output from those tests.
CHECK_CLEANFILES = *.chkexe *.chklog
-SUBDIRS = hdf mfhdf man
+@BUILD_JAVA_CONDITIONAL_FALSE@JAVA_DIR =
+@BUILD_JAVA_CONDITIONAL_TRUE@JAVA_DIR = java
+SUBDIRS = hdf mfhdf $(JAVA_DIR) man
# install libhdf4.settings in lib directory
settingsdir = $(libdir)
settings_DATA = libhdf4.settings
diff --git a/README.txt b/README.txt
index 6d397d9..4feb53b 100644
--- a/README.txt
+++ b/README.txt
@@ -1,16 +1,25 @@
-HDF version 4.2.11 released on 2015-02-09
+HDF version 4.2.12 released on 2016-06-29
===================================================
-Obtaining the latest version of HDF4
-------------------------------------
- The most recent version of the distribution can be obtained from
- the The HDF Group's FTP site:
+Information about HDF
+---------------------
- ftp://ftp.hdfgroup.org/HDF/HDF_Current/
+ The HDF (or HDF4) home page is located here:
+ https://www.hdfgroup.org/products/hdf4/index.html
+ The most recent version of the software can be obtained from:
+
+ https://www.hdfgroup.org/release4/obtain.html
+
+ The HDF documentation can be found in:
+
+ https://www.hdfgroup.org/release4/doc.html
+
+
Source Distribution Layout
--------------------------
+
The top level of the source code distribution contains the following
subdirectories:
@@ -24,6 +33,8 @@ Source Distribution Layout
Please see the README in each directory for further
information on each package.
+ java -- The Java HDF JNI library
+
mfhdf -- The netCDF(mfhdf) part of the HDF/mfhdf distribution and
additional HDF utilities, such as hdp, hrepack, hdfimport, etc.
@@ -34,20 +45,13 @@ Source Distribution Layout
Descriptions of new features and bug fixes in this release.
Files in this sub-directory can be used as supplemental
documentation for HDF.
- These files are also available on the THG FTP server:
- ftp://ftp.hdfgroup.org/HDF/HDF_Current/src/unpacked/release_notes
-
- windows-- Removed 4/2013.
Third Party Software Requirements
---------------------------------
- 1. JPEG distribution release 6b(libjpeg.a). You may download the software
- from http://www.hdfgroup.org/release4/obtain.html.
-
- 2. ZLIB 1.1.4(libz.a) or later distribution. You may download the software
- from the http://www.gzip.org/ site.
+ * JPEG distribution release 6b or later.
+ * ZLIB 1.1.4(libz.a) or later.
System Requirements
@@ -63,28 +67,3 @@ Configuring/Testing/Installing
See the INSTALL file for instructions on configuring, testing,
and installing this software on Unix and non-UNIX systems.
-
-DOCUMENTATION/FAQ/HELP
-----------------------
- The HDF documentation can be found on the THG FTP server and on
- the THG website:
-
- http://hdfgroup.org/doc.html
- ftp://ftp.hdfgroup.org/HDF/Documentation
-
- The HDF home page is at:
-
- http://hdfgroup.org
-
- An FAQ is available on the FTP server and as under "Information about
- HDF" on the website.
-
- http://hdfgroup.org/products/hdf4/index.html
-
- If you have any questions or comments, or would like to be added to
- or removed from our hdfnews email list, contact information and other
- resource information can be found on the HDF Support page:
-
- http://hdfgroup.org/services/support.html
-
-
diff --git a/aclocal.m4 b/aclocal.m4
index 1ecfa26..b9e6632 100644
--- a/aclocal.m4
+++ b/aclocal.m4
@@ -1183,6 +1183,19 @@ AC_SUBST([am__tar])
AC_SUBST([am__untar])
]) # _AM_PROG_TAR
+m4_include([m4/ax_check_class.m4])
+m4_include([m4/ax_check_classpath.m4])
+m4_include([m4/ax_check_java_home.m4])
+m4_include([m4/ax_check_junit.m4])
+m4_include([m4/ax_java_options.m4])
+m4_include([m4/ax_jni_include_dir.m4])
+m4_include([m4/ax_prog_jar.m4])
+m4_include([m4/ax_prog_java.m4])
+m4_include([m4/ax_prog_java_works.m4])
+m4_include([m4/ax_prog_javac.m4])
+m4_include([m4/ax_prog_javac_works.m4])
+m4_include([m4/ax_prog_javadoc.m4])
+m4_include([m4/ax_try_compile_java.m4])
# libtool.m4 - Configure libtool for the host system. -*-Autoconf-*-
#
# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
diff --git a/bin/chkmanifest b/bin/chkmanifest
index 69cd728..da4a98a 100755
--- a/bin/chkmanifest
+++ b/bin/chkmanifest
@@ -12,6 +12,7 @@
verbose=yes
MANIFEST=/tmp/HD_MANIFEST.$$
SVNENTRY=/tmp/HD_SVNENTRY.$$
+USE_SVN_LS=""
# function definitions
@@ -129,28 +130,49 @@ done
# I don't know what the official svn format is if such a specification exists.
# Algorithm:
# If the first line of the file has 'xml version="1.0"' in it, it is created
-# by svn 1.3 or older; else if it has '^file$' in it, it is created by svn 1.4.
+# by svn 1.3 or older; else if it has '^file$' in it, it is created by
+# svn 1.4 - 1.6. Otherwise it is assumed to be newer, and 'svn ls -R'
+# will give a list of checked in files.
svn_entry_file=.svn/entries
if head -1 $svn_entry_file | grep 'xml version="1.0"' > /dev/null 2>&1;then
getsvnentries=GETSVNENTRIES_13
elif grep '^file$' $svn_entry_file > /dev/null 2>&1; then
getsvnentries=GETSVNENTRIES_14
else
- echo "Unknown svn entries format. Aborted"
- exit 1
+ USE_SVN_LS="yes"
+# echo "Unknown svn entries format. Aborted"
+# exit 1
fi
-for svn in `find . -type d -name .svn -print`; do
- path=`echo $svn |sed 's/\/.svn//'`
- for file in `$getsvnentries $svn`; do
- if (grep ^$path/$file$ $MANIFEST >/dev/null); then
- :
- else
- echo "+ $path/$file"
- fail=yes
- fi
+if [ -z "${USE_SVN_LS}" ]; then
+ for svn in `find . -type d -name .svn -print`; do
+ path=`echo $svn |sed 's/\/.svn//'`
+ for file in `$getsvnentries $svn`; do
+ if (grep ^$path/$file$ $MANIFEST >/dev/null); then
+ :
+ else
+ echo "+ $path/$file"
+ fail=yes
+ fi
+ done
done
-done
+else
+ # Next check svn ls, which gets a list of all files that are
+ # checked in.
+ svn_ls=`svn ls -R`
+ for file in $svn_ls; do
+ path="./${file}"
+ # Ignore directories
+ if [ ! -d $path ]; then
+ if (grep ^$path$ $MANIFEST >/dev/null); then
+ :
+ else
+ echo "+ $path"
+ fail=yes
+ fi
+ fi
+ done
+fi
if [ "X$fail" = "Xyes" ]; then
cat 1>&2 <<EOF
diff --git a/bin/cmakehdf4 b/bin/cmakehdf4
index 19f0c47..85174ce 100755
--- a/bin/cmakehdf4
+++ b/bin/cmakehdf4
@@ -114,7 +114,7 @@ fi
echo Running Cmake for HDF4-${version} ...
# 4. Configure the C library, tools and tests with this command:
-STEP "Configure..." "cmake -C $srcdir/config/cmake/cacheinit.cmake -G 'Unix Makefiles' -DCMAKE_INSTALL_PREFIX:PATH="." -DHDF4_ALLOW_EXTERNAL_SUPPORT:STRING="SVN" -DHDF4_PACKAGE_EXTLIBS:BOOL=ON -DHDF4_BUILD_TOOLS:BOOL=ON -DHDF4_BUILD_UTILS:BOOL=ON -DBUILD_TESTING:BOOL=ON $srcdir" $configlog
+STEP "Configure..." "cmake -C $srcdir/config/cmake/cacheinit.cmake -G 'Unix Makefiles' -DCMAKE_INSTALL_PREFIX:PATH="$PWD" -DHDF4_ALLOW_EXTERNAL_SUPPORT:STRING="" -DHDF4_PACKAGE_EXTLIBS:BOOL=OFF -DHDF4_BUILD_TOOLS:BOOL=ON -DHDF4_BUILD_UTILS:BOOL=ON -DBUILD_TESTING:BOOL=ON $srcdir" $configlog
# 5. Build the C library, tools and tests with this command:
STEP "Build the library, tools and tests, ..." "cmake --build . --config Release" $makelog
@@ -190,7 +190,7 @@ EOF
#==========
#==========
cat > $cfgfile <<'EOF'
-cmake_minimum_required(VERSION 2.8.10 FATAL_ERROR)
+cmake_minimum_required(VERSION 3.1.0 FATAL_ERROR)
########################################################
# This dashboard is maintained by The HDF Group
# For any comments please contact cdashhelp@hdfgroup.org
diff --git a/bin/h4_release_check.sh b/bin/h4_release_check.sh
new file mode 100755
index 0000000..0bdd439
--- /dev/null
+++ b/bin/h4_release_check.sh
@@ -0,0 +1,231 @@
+#!/bin/bash
+red=$(tput setaf 1)
+green=$(tput setaf 2)
+yellow=$(tput setaf 3)
+magenta=$(tput setaf 5)
+reset=$(tput sgr0)
+pass=0
+fail=1
+
+##################################################
+# TESTING CONTENTS OF lib/libhdf4.settings file.
+##################################################
+
+if [ ! -f ./lib/libhdf4.settings ]; then
+ echo " $red FAILED$reset: ./lib/libhdf4.settings DOES NOT EXIST"
+ exit 1
+fi
+
+# CHECKING FOR SHARED LIBRARIES
+if grep -q "Shared Libraries: no" ./lib/libhdf4.settings; then
+ echo "Checking ./lib/libhdf4.settings -- Shared Libraries ... $green PASSED$reset"
+else
+ echo "Checking ./lib/libhdf4.settings -- Shared Libraries ... $red FAILED$reset"
+ echo " ... Should be 'Shared Libraries: no'"
+fi
+
+# CHECKING FOR STATIC LIBRARIES
+if grep -q "Static Libraries: yes" ./lib/libhdf4.settings; then
+ echo "Checking ./lib/libhdf4.settings -- Static Libraries ... $green PASSED$reset"
+else
+ echo "Checking ./lib/libhdf4.settings -- Shared Libraries ... $red FAILED$reset"
+ echo " ... Should be 'Static Libraries: yes'"
+fi
+
+# CHECKING FOR FORTRAN ENABLED
+if grep -q "Fortran: yes" ./lib/libhdf4.settings; then
+ echo "Checking ./lib/libhdf4.settings -- Fortran Support ... $green PASSED$reset"
+else
+ echo "Checking ./lib/libhdf4.settings -- Fortran Support ... $red FAILED$reset"
+ echo " ... Should be 'Fortran: yes'"
+fi
+
+# CHECKING COMPILATION MODE
+if grep -q "Compilation Mode: production" ./lib/libhdf4.settings; then
+ echo "Checking ./lib/libhdf4.settings -- Compilation Mode ... $green PASSED$reset"
+else
+ echo "Checking ./lib/libhdf4.settings -- Compilation Mode ... $red FAILED$reset"
+ echo " ... Should be 'Compilation Mode: production'"
+fi
+
+# CHECKING OPTIMIZATION
+if grep -q " -g " ./lib/libhdf4.settings; then
+ echo "Checking ./lib/libhdf4.settings -- Optimization Mode ... $red FAILED$reset"
+ echo " ... Should be no there is no '-g' flag among compiler flags"
+else
+ echo "Checking ./lib/libhdf4.settings -- Optimization Mode ... $green PASSED$reset"
+fi
+
+#CHECKING COMPRESSION
+if grep -q " -ljpeg " ./lib/libhdf4.settings; then
+ echo "Checking ./lib/libhdf4.settings -- JPEG COMPRESSION ... $green PASSED$reset"
+else
+ echo "Checking ./lib/libhdf4.settings -- JPEG COMPRESSION ... $yellow WARNING$reset"
+ echo " ... JPEG compression was not found"
+fi
+if grep -q " -lz " ./lib/libhdf4.settings; then
+ echo "Checking ./lib/libhdf4.settings -- ZLIB COMPRESSION ... $green PASSED$reset"
+else
+ echo "Checking ./lib/libhdf4.settings -- ZLIB COMPRESSION ... $red FAILED$reset"
+ echo " ... ZLIB compression is required"
+fi
+
+if grep -q "SZIP compression: enabled" ./lib/libhdf4.settings; then
+ echo "Checking ./lib/libhdf4.settings -- SZIP COMPRESSION ... $green PASSED$reset"
+else
+ echo "Checking ./lib/libhdf4.settings -- SZIP COMPRESSION ... $red FAILED$reset"
+ echo " ... SZIP compression should be enabled"
+fi
+
+
+
+#########################
+# TESTING IF FILES EXISTS
+#########################
+
+include_file=(atom.h bitvect.h cdeflate.h cnbit.h cnone.h crle.h cskphuff.h cszip.h dfan.h
+dffunc.f90 dffunc.inc dfgr.h df.h dfi.h dfrig.h dfsd.h dfstubs.h dfufp2i.h dynarray.h H4api_adpt.h
+h4config.h hbitio.h hchunks.h hcomp.h hcompi.h hconv.h hdatainfo.h hdf2netcdf.h hdf.f90
+hdf.h hdfi.h hdf.inc herr.h hfile.h hkit.h hlimits.h hntdefs.h hproto_fortran.h hproto.h
+htags.h linklist.h local_nc.h mfan.h mfdatainfo.h mffunc.f90 mffunc.inc mfgr.h mfhdf.h
+mfhdfi.h mstdio.h netcdf.f90 netcdf.h netcdf.inc tbbt.h vattr.h vg.h)
+
+lib_file=(libdf.a libdf.la libhdf4.settings libjpeg.a libmfhdf.a libmfhdf.la libsz.a libz.a)
+
+bin_file=(gif2hdf h4cc h4fc h4redeploy hdf24to8 hdf2gif hdf2jpeg hdf8to24 hdfcomp
+hdfed hdfimport hdfls hdfpack hdftopal hdftor8 hdfunpac hdiff hdp hrepack jpeg2hdf
+ncdump ncgen paltohdf r8tohdf ristosds vmake vshow)
+
+# TEST TO MAKE SURE THE DIRECTORIES EXISTS
+
+if [ ! -d ./include ]; then
+ echo " FAILED: ./include DIRECTORY DOES NOT EXIST"
+ exit
+fi
+if [ ! -d ./bin ]; then
+ echo " FAILED: ./bin DIRECTORY DOES NOT EXIST"
+ exit
+fi
+if [ ! -d ./lib ]; then
+ echo " FAILED: ./lib DIRECTORY DOES NOT EXIST"
+ exit
+fi
+
+# TESTING FOR INCLUDE FILES
+
+status=$pass
+for var in "${include_file[@]}"
+do
+ if [ ! -f ./include/$var ]; then
+ echo "FILE$red ./include/${var}$reset DOES NOT EXIST"
+ status=$fail
+ fi
+done
+
+if [ "$status" -eq "$fail" ]; then
+ echo "Testing for include files ... $red FAILED $reset"
+else
+ echo "Testing for include files ... $green PASSED $reset"
+fi
+
+# TESTING FOR LIB FILES
+status=$pass
+for var in "${lib_file[@]}"
+do
+ if [ ! -f ./lib/$var ]; then
+ echo "FILE$red ./lib/${var}$reset DOES NOT EXIST"
+ status=$fail
+ fi
+done
+
+if [ "$status" -eq "$fail" ]; then
+ echo "Testing for lib files ... $red FAILED $reset"
+else
+ echo "Testing for lib files ... $green PASSED $reset"
+fi
+
+# TESTING FOR BIN FILES
+status=$pass
+for var in "${bin_file[@]}"
+do
+ if [ ! -f ./bin/$var ]; then
+ echo "FILE$red ./bin/${var}$reset DOES NOT EXIST"
+ status=$fail
+ fi
+done
+
+if [ "$status" -eq "$fail" ]; then
+ echo "Testing for bin files ... $red FAILED $reset"
+else
+ echo "Testing for bin files ... $green PASSED $reset"
+fi
+
+# Check settings in libhdf4.settings match what is in RELEASE.txt
+
+# Check the OS type
+
+UnameInfo=`grep -i 'Uname information:' lib/libhdf4.settings`
+OStype=`echo $UnameInfo | cut -d ":" -f 2 | cut -d " " -f 4`
+
+# This code will change the OStype to remove all the code between
+# the "- first number" and ".el". For example 2.6.32-573.22.1.el6.x86_64
+# will become 2.6.32-573.el6.x86_64
+#
+#OSsize=`echo ${OStype} | cut -d "e" -f 2`
+#OSsize=`echo .e${OSsize}`
+#
+#kernel=`echo ${OStype} | cut -d "-" -f 1`
+#vendor_kernel=`echo ${OStype} | cut -d "-" -f 2 | cut -d "." -f 1`
+#OStype=`echo ${kernel}-${vendor_kernel}${OSsize}`
+
+libCC=`grep -i 'C compiler' lib/libhdf4.settings`
+libFC=`grep -i 'Fortran Compiler' lib/libhdf4.settings`
+
+IFS='%'
+
+if grep -i -q "$OStype" RELEASE.txt; then
+ echo "Testing for OS type ... $green PASSED $reset"
+# Check the processor
+ line_start=`grep -in "$OStype" RELEASE.txt | cut -d : -f 1`
+
+# print to screen to have the user check compiler versions
+ echo "$yellow __ ________ _____ _____ ________ __ $reset"
+ echo "$yellow \ \ / / ____| __ \|_ _| ____\ \ / / $reset"
+ echo "$yellow \ \ / /| |__ | |__) | | | | |__ \ \_/ / $reset"
+ echo "$yellow \ \/ / | __| | _ / | | | __| \ / $reset"
+ echo "$yellow \ / | |____| | \ \ _| |_| | | | $reset"
+ echo "$yellow \/ |______|_| \_\_____|_| |_| $reset"
+ echo ""
+
+
+ echo "$yellow (1) VERIFY: C Compiler in libhdf5.settings: $reset"
+ echo " $libCC"
+ echo "$yellow should match a compiler listed in RELEASE.txt $reset"
+ line_end=$line_start-2
+ sed -n "$line_start,/^$/p" RELEASE.txt |
+ {
+ while read line; do
+ ((line_end++))
+ ## counter=$[$line_endcounter +1]
+ done
+ sed -n $line_start,${line_end}p RELEASE.txt
+ }
+else
+ echo "Testing for OS type ... $red FAILED $reset"
+ echo " Operating system $OStype not found in RELEASE.txt"
+fi
+
+echo ""
+echo "$yellow (2) VERIFY: Binary executables do not requre any external $reset"
+echo "$yellow libraries (except system libraries on some machines): $reset"
+
+echo `echo " Result from ${magenta}ldd bin/hdp${reset}: "; ldd bin/hdp`
+echo ""
+
+
+echo ""
+echo "$yellow (3) VERIFY: Binary executables match libhdf5.settings $reset"
+
+echo `echo " Result from ${magenta}file bin/hdp${reset}: "; file bin/hdp`
+echo `echo " Result from lib/libhdf4.settings: "; echo ${UnameInfo}`
+echo ""
diff --git a/bin/h4vers b/bin/h4vers
index 0daa4bb..ef5f200 100755
--- a/bin/h4vers
+++ b/bin/h4vers
@@ -366,7 +366,7 @@ if ($MANHDF1) {
# the current month and year and the updated version.
print FILE $newverline, "\n";
$man_hdf1_updates += 1;
- } elsif ($line =~ m/^$verstr\s*$/ || $line =~ m/^$verstr-[a-z]{2,5}\d{1,3}\s*$/ || $line =~ m/^$verstr\scurrently under development\s*$/ || $line =~ m/^$verstr-[a-z]{2,5}\d{1,3}\scurrently under development$\s*/) {
+ } elsif ($line =~ m/^$verstr\s*$/ || $line =~ m/^$verstr-[a-z]{1,5}\d{1,8}\s*$/ || $line =~ m/^$verstr\scurrently under development\s*$/ || $line =~ m/^$verstr-[a-z]{1,5}\d{1,8}\scurrently under development$\s*/) {
# match the line further down that has just the version string, and replace it with the updated version string.
# this will match "major.minor.release" or "major.minor.release-subrelease" or "major.minor.release currently
# under development" or "major.minor.release-subrelease currently under development" (all followed by
diff --git a/config/apple b/config/apple
index e267a35..13ebe0a 100644
--- a/config/apple
+++ b/config/apple
@@ -38,17 +38,17 @@ case $CC_BASENAME in
DEBUG_CFLAGS="-g -ansi -Wall -pedantic "
DEBUG_CPPFLAGS=
# There is a bug somewhere in mfhdf/libsrc that is exposed by compiling
- # with any optimization in $CC in Lion, Mountain Lion and Mavericks systems.
+ # with any optimization in $CC in Lion (Darwin 11) & up
# Use -O0 for now.
case "$host_os" in
- darwin11.* | darwin12.* | darwin13.*) # Lion & Mountain Lion & Mavericks
+ darwin1[1-9].*) # Lion & Mountain Lion & Mavericks & Yosemite
xOFLAG="-O0"
;;
*) # Other OSX versions
xOFLAG="-O2"
;;
esac
- PROD_CFLAGS=${PROD_CFLAGS:-"-ansi -Wall -pedantic $xOFLAG"}
+ PROD_CFLAGS=${PROD_CFLAGS:-"$xOFLAG"}
PROD_CPPFLAGS=
PROFILE_CFLAGS="-pg"
PROFILE_CPPFLAGS=
@@ -59,10 +59,10 @@ case $CC_BASENAME in
DEBUG_CFLAGS="-g"
DEBUG_CPPFLAGS=
# There is a bug somewhere in mfhdf/libsrc that is exposed by compiling
- # with any optimization in $CC in Lion, Mountain Lion and Mavericks systems.
+ # with any optimization in $CC in Lion, Mountain Lion, Mavericks and Yosemite systems.
# Use -O0 for now.
case "$host_os" in
- darwin11.* | darwin12.* | darwin13.*) # Lion & Mountain Lion & Mavericks
+ darwin1[1-9].*) # Lion & Mountain Lion & Mavericks & Yosemite & 5 more
xOFLAG="-O0"
;;
*) # Other OSX versions
@@ -80,10 +80,10 @@ case $CC_BASENAME in
DEBUG_CFLAGS="-g -ansi -Wall -pedantic "
DEBUG_CPPFLAGS=
# There is a bug somewhere in mfhdf/libsrc that is exposed by compiling
- # with any optimization in $CC in Lion, Mountain Lion and Mavericks systems.
+ # with any optimization in $CC in Lion, Mountain Lion, Mavericks and Yosemite systems.
# Use -O0 for now.
case "$host_os" in
- darwin11.* | darwin12.* | darwin13.*) # Lion & Mountain Lion & Mavericks
+ darwin1[1-9].*) # Lion & Mountain Lion & Mavericks & Yosemite & 5 more
xOFLAG="-O0"
;;
*) # Other OSX versions
diff --git a/config/cmake/CPack.Info.plist.in b/config/cmake/CPack.Info.plist.in
new file mode 100644
index 0000000..08d371b
--- /dev/null
+++ b/config/cmake/CPack.Info.plist.in
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>English</string>
+ <key>CFBundleExecutable</key>
+ <string>@CPACK_PACKAGE_FILE_NAME@</string>
+ <key>CFBundleIconFile</key>
+ <string>@CPACK_BUNDLE_ICON@</string>
+ <key>CFBundleIdentifier</key>
+ <string>org.@CPACK_PACKAGE_VENDOR@.@CPACK_PACKAGE_NAME@@CPACK_MODULE_VERSION_SUFFIX@</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundlePackageType</key>
+ <string>FMWK</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>CFBundleVersion</key>
+ <string>@CPACK_PACKAGE_VERSION@</string>
+ <key>CFBundleShortVersionString</key>
+ <string>@CPACK_SHORT_VERSION_STRING@</string>
+ <key>CSResourcesFileMapped</key>
+ <true/>
+</dict>
+</plist>
diff --git a/config/cmake/ConfigureChecks.cmake b/config/cmake/ConfigureChecks.cmake
index fada58b..aa0010c 100644
--- a/config/cmake/ConfigureChecks.cmake
+++ b/config/cmake/ConfigureChecks.cmake
@@ -3,7 +3,6 @@
#-----------------------------------------------------------------------------
set (HDF_PREFIX "H4")
include (${HDF_RESOURCES_EXT_DIR}/ConfigureChecks.cmake)
-include (${CMAKE_ROOT}/Modules/TestForSTDNamespace.cmake)
#-----------------------------------------------------------------------------
# Option to Build HDF4 versions of NetCDF-3 APIS
diff --git a/config/cmake/FindJNI.cmake b/config/cmake/FindJNI.cmake
new file mode 100644
index 0000000..440a988
--- /dev/null
+++ b/config/cmake/FindJNI.cmake
@@ -0,0 +1,342 @@
+#.rst:
+# FindJNI
+# -------
+#
+# Find JNI java libraries.
+#
+# This module finds if Java is installed and determines where the
+# include files and libraries are. It also determines what the name of
+# the library is. The caller may set variable JAVA_HOME to specify a
+# Java installation prefix explicitly.
+#
+# This module sets the following result variables:
+#
+# ::
+#
+# JNI_INCLUDE_DIRS = the include dirs to use
+# JNI_LIBRARIES = the libraries to use
+# JNI_FOUND = TRUE if JNI headers and libraries were found.
+# JAVA_AWT_LIBRARY = the path to the jawt library
+# JAVA_JVM_LIBRARY = the path to the jvm library
+# JAVA_INCLUDE_PATH = the include path to jni.h
+# JAVA_INCLUDE_PATH2 = the include path to jni_md.h
+# JAVA_AWT_INCLUDE_PATH = the include path to jawt.h
+
+#=============================================================================
+# Copyright 2001-2009 Kitware, Inc.
+#
+# Distributed under the OSI-approved BSD License (the "License");
+# see accompanying file Copyright.txt for details.
+#
+# This software is distributed WITHOUT ANY WARRANTY; without even the
+# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the License for more information.
+#=============================================================================
+# (To distribute this file outside of CMake, substitute the full
+# License text for the above reference.)
+
+# Expand {libarch} occurrences to java_libarch subdirectory(-ies) and set ${_var}
+macro(java_append_library_directories _var)
+ # Determine java arch-specific library subdir
+ # Mostly based on openjdk/jdk/make/common/shared/Platform.gmk as of openjdk
+ # 1.6.0_18 + icedtea patches. However, it would be much better to base the
+ # guess on the first part of the GNU config.guess platform triplet.
+ if(CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64")
+ if(CMAKE_LIBRARY_ARCHITECTURE STREQUAL "x86_64-linux-gnux32")
+ set(_java_libarch "x32" "amd64" "i386")
+ else()
+ set(_java_libarch "amd64" "i386")
+ endif()
+ elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "^i.86$")
+ set(_java_libarch "i386")
+ elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "^alpha")
+ set(_java_libarch "alpha")
+ elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "^arm")
+ # Subdir is "arm" for both big-endian (arm) and little-endian (armel).
+ set(_java_libarch "arm")
+ elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "^mips")
+ # mips* machines are bi-endian mostly so processor does not tell
+ # endianness of the underlying system.
+ set(_java_libarch "${CMAKE_SYSTEM_PROCESSOR}" "mips" "mipsel" "mipseb" "mips64" "mips64el" "mipsn32" "mipsn32el")
+ elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "^(powerpc|ppc)64le")
+ set(_java_libarch "ppc64" "ppc64le")
+ elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "^(powerpc|ppc)64")
+ set(_java_libarch "ppc64" "ppc")
+ elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "^(powerpc|ppc)")
+ set(_java_libarch "ppc")
+ elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "^sparc")
+ # Both flavours can run on the same processor
+ set(_java_libarch "${CMAKE_SYSTEM_PROCESSOR}" "sparc" "sparcv9")
+ elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "^(parisc|hppa)")
+ set(_java_libarch "parisc" "parisc64")
+ elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "^s390")
+ # s390 binaries can run on s390x machines
+ set(_java_libarch "${CMAKE_SYSTEM_PROCESSOR}" "s390" "s390x")
+ elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "^sh")
+ set(_java_libarch "sh")
+ else()
+ set(_java_libarch "${CMAKE_SYSTEM_PROCESSOR}")
+ endif()
+
+ # Append default list architectures if CMAKE_SYSTEM_PROCESSOR was empty or
+ # system is non-Linux (where the code above has not been well tested)
+ if(NOT _java_libarch OR NOT (CMAKE_SYSTEM_NAME MATCHES "Linux"))
+ list(APPEND _java_libarch "i386" "amd64" "ppc")
+ endif()
+
+ # Sometimes ${CMAKE_SYSTEM_PROCESSOR} is added to the list to prefer
+ # current value to a hardcoded list. Remove possible duplicates.
+ list(REMOVE_DUPLICATES _java_libarch)
+
+ foreach(_path ${ARGN})
+ if(_path MATCHES "{libarch}")
+ foreach(_libarch ${_java_libarch})
+ string(REPLACE "{libarch}" "${_libarch}" _newpath "${_path}")
+ list(APPEND ${_var} "${_newpath}")
+ endforeach()
+ else()
+ list(APPEND ${_var} "${_path}")
+ endif()
+ endforeach()
+endmacro()
+
+#include(${CMAKE_CURRENT_LIST_DIR}/CMakeFindJavaCommon.cmake)
+INCLUDE (CMakeFindJavaCommon)
+
+# Save CMAKE_FIND_FRAMEWORK
+if(DEFINED CMAKE_FIND_FRAMEWORK)
+ set(_JNI_CMAKE_FIND_FRAMEWORK ${CMAKE_FIND_FRAMEWORK})
+else()
+ unset(_JNI_CMAKE_FIND_FRAMEWORK)
+endif()
+
+if(_JAVA_HOME_EXPLICIT)
+ set(CMAKE_FIND_FRAMEWORK NEVER)
+endif()
+
+set(JAVA_AWT_LIBRARY_DIRECTORIES)
+if(_JAVA_HOME)
+ JAVA_APPEND_LIBRARY_DIRECTORIES(JAVA_AWT_LIBRARY_DIRECTORIES
+ ${_JAVA_HOME}/jre/lib/{libarch}
+ ${_JAVA_HOME}/jre/lib
+ ${_JAVA_HOME}/lib/{libarch}
+ ${_JAVA_HOME}/lib
+ ${_JAVA_HOME}
+ )
+endif()
+get_filename_component(java_install_version
+ "[HKEY_LOCAL_MACHINE\\SOFTWARE\\JavaSoft\\Java Development Kit;CurrentVersion]" NAME)
+
+list(APPEND JAVA_AWT_LIBRARY_DIRECTORIES
+ "[HKEY_LOCAL_MACHINE\\SOFTWARE\\JavaSoft\\Java Development Kit\\1.4;JavaHome]/lib"
+ "[HKEY_LOCAL_MACHINE\\SOFTWARE\\JavaSoft\\Java Development Kit\\1.3;JavaHome]/lib"
+ "[HKEY_LOCAL_MACHINE\\SOFTWARE\\JavaSoft\\Java Development Kit\\${java_install_version};JavaHome]/lib"
+ )
+JAVA_APPEND_LIBRARY_DIRECTORIES(JAVA_AWT_LIBRARY_DIRECTORIES
+ /usr/java/lib
+ /usr/java/jre/lib
+ /usr/lib
+ /usr/lib64
+ /usr/local/lib
+ /usr/local/lib64
+ /usr/lib/jvm/java/lib
+ /usr/lib64/jvm/java/lib
+ /usr/lib/java/jre/lib/{libarch}
+ /usr/lib64/java/jre/lib/{libarch}
+ /usr/lib/jvm/jre/lib/{libarch}
+ /usr/lib64/jvm/jre/lib/{libarch}
+ /usr/local/lib/java/jre/lib/{libarch}
+ /usr/local/share/java/jre/lib/{libarch}
+ /usr/lib/j2sdk1.4-sun/jre/lib/{libarch}
+ /usr/lib/j2sdk1.5-sun/jre/lib/{libarch}
+ /opt/sun-jdk-1.5.0.04/jre/lib/{libarch}
+ /usr/lib/jvm/java-6-sun/jre/lib/{libarch}
+ /usr/lib/jvm/java-1.5.0-sun/jre/lib/{libarch}
+ /usr/lib/jvm/java-6-sun-1.6.0.00/jre/lib/{libarch} # can this one be removed according to #8821 ? Alex
+ /usr/lib/jvm/java-6-openjdk/jre/lib/{libarch}
+ /usr/lib/jvm/java-7-openjdk/jre/lib/{libarch}
+ /usr/lib/jvm/java-7-openjdk-{libarch}/jre/lib/{libarch}
+ /usr/lib/jvm/java-1.6.0-openjdk-1.6.0.0/jre/lib/{libarch} # fedora
+ # Debian specific paths for default JVM
+ /usr/lib/jvm/default-java/jre/lib/{libarch}
+ /usr/lib/jvm/default-java/jre/lib
+ /usr/lib/jvm/default-java/lib
+ # OpenBSD specific paths for default JVM
+ /usr/local/jdk-1.7.0/jre/lib/{libarch}
+ /usr/local/jre-1.7.0/lib/{libarch}
+ /usr/local/jdk-1.6.0/jre/lib/{libarch}
+ /usr/local/jre-1.6.0/lib/{libarch}
+ # SuSE specific paths for default JVM
+ /usr/lib64/jvm/java/jre/lib/{libarch}
+ /usr/lib64/jvm/jre/lib/{libarch}
+ )
+
+set(JAVA_JVM_LIBRARY_DIRECTORIES)
+foreach(dir ${JAVA_AWT_LIBRARY_DIRECTORIES})
+ list(APPEND JAVA_JVM_LIBRARY_DIRECTORIES
+ "${dir}"
+ "${dir}/client"
+ "${dir}/server"
+ # IBM SDK, Java Technology Edition, specific paths
+ "${dir}/j9vm"
+ "${dir}/default"
+ )
+endforeach()
+
+set(JAVA_AWT_INCLUDE_DIRECTORIES)
+if(_JAVA_HOME)
+ list(APPEND JAVA_AWT_INCLUDE_DIRECTORIES ${_JAVA_HOME}/include)
+endif()
+list(APPEND JAVA_AWT_INCLUDE_DIRECTORIES
+ "[HKEY_LOCAL_MACHINE\\SOFTWARE\\JavaSoft\\Java Development Kit\\1.4;JavaHome]/include"
+ "[HKEY_LOCAL_MACHINE\\SOFTWARE\\JavaSoft\\Java Development Kit\\1.3;JavaHome]/include"
+ "[HKEY_LOCAL_MACHINE\\SOFTWARE\\JavaSoft\\Java Development Kit\\${java_install_version};JavaHome]/include"
+ ${_JAVA_HOME}/include
+ /usr/include
+ /usr/java/include
+ /usr/local/include
+ /usr/lib/java/include
+ /usr/lib64/java/include
+ /usr/local/lib/java/include
+ /usr/lib/jvm/java/include
+ /usr/lib64/jvm/java/include
+ /usr/lib/jvm/java-6-sun/include
+ /usr/lib/jvm/java-1.5.0-sun/include
+ /usr/lib/jvm/java-6-sun-1.6.0.00/include # can this one be removed according to #8821 ? Alex
+ /usr/lib/jvm/java-6-openjdk/include
+ /usr/lib/jvm/java-7-openjdk/include
+ /usr/lib/jvm/java-7-openjdk-i386/include
+ /usr/lib/jvm/java-7-openjdk-amd64/include
+ /usr/lib64/jvm/java-7-openjdk/include
+ /usr/lib64/jvm/java-7-openjdk-amd64/include
+ /usr/local/share/java/include
+ /usr/lib/j2sdk1.4-sun/include
+ /usr/lib/j2sdk1.5-sun/include
+ /opt/sun-jdk-1.5.0.04/include
+ # Debian specific path for default JVM
+ /usr/lib/jvm/default-java/include
+ # OpenBSD specific path for default JVM
+ /usr/local/jdk-1.7.0/include
+ /usr/local/jdk-1.6.0/include
+ # SuSE specific paths for default JVM
+ /usr/lib64/jvm/java/include
+ )
+
+foreach(JAVA_PROG "${JAVA_RUNTIME}" "${JAVA_COMPILE}" "${JAVA_ARCHIVE}")
+ get_filename_component(jpath "${JAVA_PROG}" PATH)
+ foreach(JAVA_INC_PATH ../include ../java/include ../share/java/include)
+ if(EXISTS ${jpath}/${JAVA_INC_PATH})
+ list(APPEND JAVA_AWT_INCLUDE_DIRECTORIES "${jpath}/${JAVA_INC_PATH}")
+ endif()
+ endforeach()
+ foreach(JAVA_LIB_PATH
+ ../lib ../jre/lib ../jre/lib/i386
+ ../java/lib ../java/jre/lib ../java/jre/lib/i386
+ ../share/java/lib ../share/java/jre/lib ../share/java/jre/lib/i386)
+ if(EXISTS ${jpath}/${JAVA_LIB_PATH})
+ list(APPEND JAVA_AWT_LIBRARY_DIRECTORIES "${jpath}/${JAVA_LIB_PATH}")
+ endif()
+ endforeach()
+endforeach()
+
+if(APPLE)
+ if(CMAKE_FIND_FRAMEWORK STREQUAL "ONLY")
+ set(_JNI_SEARCHES FRAMEWORK)
+ elseif(CMAKE_FIND_FRAMEWORK STREQUAL "NEVER")
+ set(_JNI_SEARCHES NORMAL)
+ elseif(CMAKE_FIND_FRAMEWORK STREQUAL "LAST")
+ set(_JNI_SEARCHES NORMAL FRAMEWORK)
+ else()
+ set(_JNI_SEARCHES FRAMEWORK NORMAL)
+ endif()
+ set(_JNI_FRAMEWORK_JVM NAMES JavaVM)
+ set(_JNI_FRAMEWORK_JAWT "${_JNI_FRAMEWORK_JVM}")
+else()
+ set(_JNI_SEARCHES NORMAL)
+endif()
+
+set(_JNI_NORMAL_JVM
+ NAMES jvm
+ PATHS ${JAVA_JVM_LIBRARY_DIRECTORIES}
+ )
+
+set(_JNI_NORMAL_JAWT
+ NAMES jawt
+ PATHS ${JAVA_AWT_LIBRARY_DIRECTORIES}
+ )
+
+foreach(search ${_JNI_SEARCHES})
+ find_library(JAVA_JVM_LIBRARY ${_JNI_${search}_JVM})
+ find_library(JAVA_AWT_LIBRARY ${_JNI_${search}_JAWT})
+ if(JAVA_JVM_LIBRARY)
+ break()
+ endif()
+endforeach()
+unset(_JNI_SEARCHES)
+unset(_JNI_FRAMEWORK_JVM)
+unset(_JNI_FRAMEWORK_JAWT)
+unset(_JNI_NORMAL_JVM)
+unset(_JNI_NORMAL_JAWT)
+
+# Find headers matching the library.
+if("${JAVA_JVM_LIBRARY};${JAVA_AWT_LIBRARY};" MATCHES "(/JavaVM.framework|-framework JavaVM);")
+ set(CMAKE_FIND_FRAMEWORK ONLY)
+else()
+ set(CMAKE_FIND_FRAMEWORK NEVER)
+endif()
+
+# add in the include path
+find_path(JAVA_INCLUDE_PATH jni.h
+ ${JAVA_AWT_INCLUDE_DIRECTORIES}
+)
+
+find_path(JAVA_INCLUDE_PATH2 jni_md.h
+ ${JAVA_INCLUDE_PATH}
+ ${JAVA_INCLUDE_PATH}/darwin
+ ${JAVA_INCLUDE_PATH}/win32
+ ${JAVA_INCLUDE_PATH}/linux
+ ${JAVA_INCLUDE_PATH}/freebsd
+ ${JAVA_INCLUDE_PATH}/openbsd
+ ${JAVA_INCLUDE_PATH}/solaris
+ ${JAVA_INCLUDE_PATH}/hp-ux
+ ${JAVA_INCLUDE_PATH}/alpha
+)
+
+find_path(JAVA_AWT_INCLUDE_PATH jawt.h
+ ${JAVA_INCLUDE_PATH}
+)
+
+# Restore CMAKE_FIND_FRAMEWORK
+if(DEFINED _JNI_CMAKE_FIND_FRAMEWORK)
+ set(CMAKE_FIND_FRAMEWORK ${_JNI_CMAKE_FIND_FRAMEWORK})
+ unset(_JNI_CMAKE_FIND_FRAMEWORK)
+else()
+ unset(CMAKE_FIND_FRAMEWORK)
+endif()
+
+#include(${CMAKE_CURRENT_LIST_DIR}/FindPackageHandleStandardArgs.cmake)
+INCLUDE (FindPackageHandleStandardArgs)
+FIND_PACKAGE_HANDLE_STANDARD_ARGS(JNI DEFAULT_MSG JAVA_AWT_LIBRARY JAVA_JVM_LIBRARY
+ JAVA_INCLUDE_PATH JAVA_INCLUDE_PATH2 JAVA_AWT_INCLUDE_PATH)
+
+mark_as_advanced(
+ JAVA_AWT_LIBRARY
+ JAVA_JVM_LIBRARY
+ JAVA_AWT_INCLUDE_PATH
+ JAVA_INCLUDE_PATH
+ JAVA_INCLUDE_PATH2
+)
+
+set(JNI_LIBRARIES
+ ${JAVA_AWT_LIBRARY}
+ ${JAVA_JVM_LIBRARY}
+)
+
+set(JNI_INCLUDE_DIRS
+ ${JAVA_INCLUDE_PATH}
+ ${JAVA_INCLUDE_PATH2}
+ ${JAVA_AWT_INCLUDE_PATH}
+)
+
+message ("JNI_LIBRARIES=${JNI_LIBRARIES}")
+message ("JNI_INCLUDE_DIRS=${JNI_INCLUDE_DIRS}")
diff --git a/config/cmake/HDF4Macros.cmake b/config/cmake/HDF4Macros.cmake
index 5c863c8..2796aa4 100644
--- a/config/cmake/HDF4Macros.cmake
+++ b/config/cmake/HDF4Macros.cmake
@@ -1,5 +1,5 @@
#-------------------------------------------------------------------------------
-MACRO (H4_SET_LIB_OPTIONS libtarget libname libtype)
+macro (H4_SET_LIB_OPTIONS libtarget libname libtype)
set (LIB_OUT_NAME "${libname}")
if (${libtype} MATCHES "SHARED")
if (WIN32)
@@ -22,6 +22,18 @@ MACRO (H4_SET_LIB_OPTIONS libtarget libname libtype)
BUILD_WITH_INSTALL_RPATH ${HDF4_BUILD_WITH_INSTALL_NAME}
)
endif (HDF4_BUILD_WITH_INSTALL_NAME)
+ if (HDF4_BUILD_FRAMEWORKS)
+ if (${libtype} MATCHES "SHARED")
+ # adapt target to build frameworks instead of dylibs
+ set_target_properties(${libtarget} PROPERTIES
+ XCODE_ATTRIBUTE_INSTALL_PATH "@rpath"
+ FRAMEWORK TRUE
+ FRAMEWORK_VERSION ${HDF4_PACKAGE_VERSION_MAJOR}
+ MACOSX_FRAMEWORK_IDENTIFIER org.hdfgroup.${libtarget}
+ MACOSX_FRAMEWORK_SHORT_VERSION_STRING ${HDF4_PACKAGE_VERSION_MAJOR}
+ MACOSX_FRAMEWORK_BUNDLE_VERSION ${HDF4_PACKAGE_VERSION_MAJOR})
+ endif (${libtype} MATCHES "SHARED")
+ endif (HDF4_BUILD_FRAMEWORKS)
endif (APPLE)
-ENDMACRO (H4_SET_LIB_OPTIONS)
+endmacro (H4_SET_LIB_OPTIONS)
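H4_SET_LIB_OPTIONS(libtarget libname libtype) applies the library naming rules and, when HDF4_BUILD_FRAMEWORKS is set on Apple, repackages shared targets as frameworks. A call site looks roughly like the sketch below; the target, name and source variables are placeholders standing in for whatever the library's own CMakeLists.txt defines:

    add_library (${HDF4_SRC_LIB_TARGET} STATIC ${HDF4_SRC_CSOURCES})
    H4_SET_LIB_OPTIONS (${HDF4_SRC_LIB_TARGET} ${HDF4_SRC_LIB_NAME} STATIC)
    if (BUILD_SHARED_LIBS)
      add_library (${HDF4_SRC_LIBSH_TARGET} SHARED ${HDF4_SRC_CSOURCES})
      H4_SET_LIB_OPTIONS (${HDF4_SRC_LIBSH_TARGET} ${HDF4_SRC_LIB_NAME} SHARED)
    endif ()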
diff --git a/config/cmake/HDF4UseFortran.cmake b/config/cmake/HDF4UseFortran.cmake
index febbcbd..6f739ec 100644
--- a/config/cmake/HDF4UseFortran.cmake
+++ b/config/cmake/HDF4UseFortran.cmake
@@ -23,99 +23,4 @@ file (STRINGS ${CMAKE_BINARY_DIR}/F77Mangle.h CONTENTS REGEX "H4_F77_GLOBAL_\\(.
string (REGEX MATCH "H4_F77_GLOBAL_\\(.*,.*\\) +(.*)" RESULT ${CONTENTS})
set (H4_F77_FUNC_ "H4_F77_FUNC_(name,NAME) ${CMAKE_MATCH_1}")
-#-----------------------------------------------------------------------------
-# The provided CMake Fortran macros don't provide a general check function
-# so this one is used for a sizeof test.
-#-----------------------------------------------------------------------------
-MACRO (CHECK_FORTRAN_FEATURE FUNCTION CODE VARIABLE)
- if (NOT DEFINED ${VARIABLE})
- message (STATUS "Testing Fortran ${FUNCTION}")
- if (CMAKE_REQUIRED_LIBRARIES)
- set (CHECK_FUNCTION_EXISTS_ADD_LIBRARIES
- "-DLINK_LIBRARIES:STRING=${CMAKE_REQUIRED_LIBRARIES}")
- else (CMAKE_REQUIRED_LIBRARIES)
- set (CHECK_FUNCTION_EXISTS_ADD_LIBRARIES)
- endif (CMAKE_REQUIRED_LIBRARIES)
- file (WRITE
- ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeTmp/testFortranCompiler.f
- "${CODE}"
- )
- try_compile (${VARIABLE}
- ${CMAKE_BINARY_DIR}
- ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeTmp/testFortranCompiler.f
- CMAKE_FLAGS "${CHECK_FUNCTION_EXISTS_ADD_LIBRARIES}"
- OUTPUT_VARIABLE OUTPUT
- )
-
-# message ( "* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * ")
-# message ( "Test result ${OUTPUT}")
-# message ( "* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * ")
-
- if (${VARIABLE})
- set (${VARIABLE} 1 CACHE INTERNAL "Have Fortran function ${FUNCTION}")
- message (STATUS "Testing Fortran ${FUNCTION} - OK")
- file (APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeOutput.log
- "Determining if the Fortran ${FUNCTION} exists passed with the following output:\n"
- "${OUTPUT}\n\n"
- )
- else (${VARIABLE})
- message (STATUS "Testing Fortran ${FUNCTION} - Fail")
- set (${VARIABLE} "" CACHE INTERNAL "Have Fortran function ${FUNCTION}")
- file (APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeError.log
- "Determining if the Fortran ${FUNCTION} exists failed with the following output:\n"
- "${OUTPUT}\n\n")
- endif (${VARIABLE})
- endif (NOT DEFINED ${VARIABLE})
-ENDMACRO (CHECK_FORTRAN_FEATURE)
-
-#-----------------------------------------------------------------------------
-# Configure Checks which require Fortran compilation must go in here
-# not in the main ConfigureChecks.cmake files, because if the user has
-# no Fortran compiler, problems arise.
-#
-# Be careful with leading spaces here, do not remove them.
-#-----------------------------------------------------------------------------
-CHECK_FORTRAN_FEATURE(sizeof
- "
- PROGRAM main
- i = sizeof(x)
- END PROGRAM
- "
- FORTRAN_HAVE_SIZEOF
-)
-
-CHECK_FORTRAN_FEATURE(RealIsNotDouble
- "
- MODULE type_mod
- INTERFACE h4t
- MODULE PROCEDURE h4t_real
- MODULE PROCEDURE h4t_dble
- END INTERFACE
- CONTAINS
- SUBROUTINE h4t_real(r)
- REAL :: r
- END SUBROUTINE h4t_real
- SUBROUTINE h4t_dble(d)
- DOUBLE PRECISION :: d
- END SUBROUTINE h4t_dble
- END MODULE type_mod
- PROGRAM main
- USE type_mod
- REAL :: r
- DOUBLE PRECISION :: d
- CALL h4t(r)
- CALL h4t(d)
- END PROGRAM main
- "
- FORTRAN_DEFAULT_REAL_NOT_DOUBLE
-)
-
-#-----------------------------------------------------------------------------
-# Add debug information (intel Fortran : JB)
-#-----------------------------------------------------------------------------
-if (CMAKE_Fortran_COMPILER MATCHES ifort)
- if (WIN32)
- set (CMAKE_Fortran_FLAGS_DEBUG "/debug:full /dbglibs " CACHE "flags" STRING FORCE)
- set (CMAKE_EXE_LINKER_FLAGS_DEBUG "/DEBUG" CACHE "flags" STRING FORCE)
- endif (WIN32)
-endif (CMAKE_Fortran_COMPILER MATCHES ifort)
+include (${HDF_RESOURCES_EXT_DIR}/HDFUseFortran.cmake)
diff --git a/config/cmake/HDF4_Examples.cmake.in b/config/cmake/HDF4_Examples.cmake.in
index 373081a..a8b86c3 100644
--- a/config/cmake/HDF4_Examples.cmake.in
+++ b/config/cmake/HDF4_Examples.cmake.in
@@ -1,47 +1,100 @@
-cmake_minimum_required(VERSION 2.8.10 FATAL_ERROR)
+cmake_minimum_required(VERSION 3.1.0 FATAL_ERROR)
###############################################################################################################
-# This script will build and run the examples from a compressed file
+# This script will build and run the examples from a folder
# Execute from a command line:
-# ctest -S HDF4_Examples.cmake,HDF4Examples-0.1.1-Source -C Release -V -O test.log
+# ctest -S HDF4_Examples.cmake,OPTION=VALUE -C Release -V -O test.log
###############################################################################################################
-set(INSTALLDIR "@CMAKE_INSTALL_PREFIX@")
set(CTEST_CMAKE_GENERATOR "@CMAKE_GENERATOR@")
-set(STATICLIBRARIES "@H4_ENABLE_STATIC_LIB@")
-set(CTEST_SOURCE_NAME ${CTEST_SCRIPT_ARG})
set(CTEST_DASHBOARD_ROOT ${CTEST_SCRIPT_DIRECTORY})
-set(CTEST_BUILD_CONFIGURATION "Release")
-#set(NO_MAC_FORTRAN "true")
-#set(BUILD_OPTIONS "${BUILD_OPTIONS} -DHDF_BUILD_FORTRAN:BOOL=ON")
-set(CTEST_USE_TAR_SOURCE "${CTEST_SCRIPT_ARG}")
+
+# handle input parameters to script.
+#INSTALLDIR - HDF4 root folder
+#CTEST_BUILD_CONFIGURATION - Release, Debug, RelWithDebInfo
+#CTEST_SOURCE_NAME - name of source folder; HDF4Examples
+#STATIC_LIBRARIES - Default is YES
+#FORTRAN_LIBRARIES - Default is NO
+#JAVA_LIBRARIES - Default is NO
+##NO_MAC_FORTRAN - set to TRUE to allow shared libs on a Mac
+if(DEFINED CTEST_SCRIPT_ARG)
+ # transform ctest script arguments of the form
+ # script.ctest,var1=value1,var2=value2
+ # to variables with the respective names set to the respective values
+ string(REPLACE "," ";" script_args "${CTEST_SCRIPT_ARG}")
+ foreach(current_var ${script_args})
+ if ("${current_var}" MATCHES "^([^=]+)=(.+)$")
+ set("${CMAKE_MATCH_1}" "${CMAKE_MATCH_2}")
+ endif()
+ endforeach()
+endif()
+if(NOT DEFINED INSTALLDIR)
+ set(INSTALLDIR "@CMAKE_INSTALL_PREFIX@")
+endif()
+if(NOT DEFINED CTEST_BUILD_CONFIGURATION)
+ set(CTEST_BUILD_CONFIGURATION "Release")
+endif()
+if(NOT DEFINED CTEST_SOURCE_NAME)
+ set(CTEST_SOURCE_NAME "HDF4Examples")
+endif()
+if(NOT DEFINED STATIC_LIBRARIES)
+ set(STATICLIBRARIES "YES")
+else(NOT DEFINED STATIC_LIBRARIES)
+ set(STATICLIBRARIES "NO")
+endif()
+if(NOT DEFINED FORTRAN_LIBRARIES)
+ set(FORTRANLIBRARIES "NO")
+else(NOT DEFINED FORTRAN_LIBRARIES)
+ set(FORTRANLIBRARIES "YES")
+endif()
+if(NOT DEFINED JAVA_LIBRARIES)
+ set(JAVALIBRARIES "NO")
+else(NOT DEFINED JAVA_LIBRARIES)
+ set(JAVALIBRARIES "YES")
+endif()
+
+#TAR_SOURCE - name of tarfile
+#if(NOT DEFINED TAR_SOURCE)
+# set(CTEST_USE_TAR_SOURCE "HDF4Examples-0.3.1-Source")
+#endif()
###############################################################################################################
# Adjust the following SET Commands as needed
###############################################################################################################
if(WIN32)
- if(STATICLIBRARIES)
- set(BUILD_OPTIONS "${BUILD_OPTIONS} -DUSE_SHARED_LIBS:BOOL=OFF")
- endif(STATICLIBRARIES)
- set(ENV{HDF4_DIR} "${INSTALLDIR}/cmake/hdf4")
+ if(${STATICLIBRARIES})
+ set(BUILD_OPTIONS "${BUILD_OPTIONS} -DBUILD_SHARED_LIBS:BOOL=OFF")
+ endif()
+ set(ENV{HDF4_DIR} "${INSTALLDIR}/cmake")
set(CTEST_BINARY_NAME ${CTEST_SOURCE_NAME}\\build)
set(CTEST_SOURCE_DIRECTORY "${CTEST_DASHBOARD_ROOT}\\${CTEST_SOURCE_NAME}")
set(CTEST_BINARY_DIRECTORY "${CTEST_DASHBOARD_ROOT}\\${CTEST_BINARY_NAME}")
else(WIN32)
- if(STATICLIBRARIES)
- set(BUILD_OPTIONS "${BUILD_OPTIONS} -DUSE_SHARED_LIBS:BOOL=OFF -DCMAKE_ANSI_CFLAGS:STRING=-fPIC")
- endif(STATICLIBRARIES)
- set(ENV{HDF4_DIR} "${INSTALLDIR}/share/cmake/hdf4")
+ if(${STATICLIBRARIES})
+ set(BUILD_OPTIONS "${BUILD_OPTIONS} -DBUILD_SHARED_LIBS:BOOL=OFF -DCMAKE_ANSI_CFLAGS:STRING=-fPIC")
+ endif()
+ set(ENV{HDF4_DIR} "${INSTALLDIR}/share/cmake")
set(ENV{LD_LIBRARY_PATH} "${INSTALLDIR}/lib")
set(CTEST_BINARY_NAME ${CTEST_SOURCE_NAME}/build)
set(CTEST_SOURCE_DIRECTORY "${CTEST_DASHBOARD_ROOT}/${CTEST_SOURCE_NAME}")
set(CTEST_BINARY_DIRECTORY "${CTEST_DASHBOARD_ROOT}/${CTEST_BINARY_NAME}")
endif(WIN32)
+if(${FORTRANLIBRARIES})
+ set(BUILD_OPTIONS "${BUILD_OPTIONS} -DHDF_BUILD_FORTRAN:BOOL=ON")
+else()
+ set(BUILD_OPTIONS "${BUILD_OPTIONS} -DHDF_BUILD_FORTRAN:BOOL=OFF")
+endif()
+if(${JAVALIBRARIES})
+ set(BUILD_OPTIONS "${BUILD_OPTIONS} -DHDF_BUILD_JAVA:BOOL=ON")
+else()
+ set(BUILD_OPTIONS "${BUILD_OPTIONS} -DHDF_BUILD_JAVA:BOOL=OFF")
+endif()
+set(BUILD_OPTIONS "${BUILD_OPTIONS} -DHDF4_PACKAGE_NAME:STRING=@HDF4_PACKAGE@@HDF_PACKAGE_EXT@")
###############################################################################################################
# For any comments please contact cdashhelp@hdfgroup.org
#
###############################################################################################################
-
+
#-----------------------------------------------------------------------------
# MAC machines need special option
#-----------------------------------------------------------------------------
@@ -52,14 +105,14 @@ if(APPLE)
set(ENV{CC} "${XCODE_CC}")
set(ENV{CXX} "${XCODE_CXX}")
if(NOT NO_MAC_FORTRAN)
- # Shared fortran is not supported, build static
+ # Shared fortran is not supported, build static
set(BUILD_OPTIONS "${BUILD_OPTIONS} -DBUILD_SHARED_LIBS:BOOL=OFF -DCMAKE_ANSI_CFLAGS:STRING=-fPIC")
- else(NOT NO_MAC_FORTRAN)
+ else()
set(BUILD_OPTIONS "${BUILD_OPTIONS} -DHDF_BUILD_FORTRAN:BOOL=OFF")
- endif(NOT NO_MAC_FORTRAN)
+ endif()
set(BUILD_OPTIONS "${BUILD_OPTIONS} -DCTEST_USE_LAUNCHERS:BOOL=ON -DCMAKE_BUILD_WITH_INSTALL_RPATH:BOOL=OFF")
-endif(APPLE)
-
+endif()
+
#-----------------------------------------------------------------------------
set(CTEST_CMAKE_COMMAND "\"${CMAKE_COMMAND}\"")
## --------------------------
@@ -68,28 +121,28 @@ if(CTEST_USE_TAR_SOURCE)
## --------------------------
if(WIN32)
message(STATUS "extracting... [${CMAKE_EXECUTABLE_NAME} -E tar -xvf ${CTEST_USE_TAR_SOURCE}.zip]")
- execute_process(COMMAND ${CMAKE_EXECUTABLE_NAME} -E tar -xvf ${CTEST_USE_TAR_SOURCE}.zip RESULT_VARIABLE rv)
- else(WIN32)
+ execute_process(COMMAND ${CMAKE_EXECUTABLE_NAME} -E tar -xvf ${CTEST_DASHBOARD_ROOT}\\${CTEST_USE_TAR_SOURCE}.zip RESULT_VARIABLE rv)
+ else()
message(STATUS "extracting... [${CMAKE_EXECUTABLE_NAME} -E tar -xvf ${CTEST_USE_TAR_SOURCE}.tar]")
- execute_process(COMMAND ${CMAKE_EXECUTABLE_NAME} -E tar -xvf ${CTEST_USE_TAR_SOURCE}.tar RESULT_VARIABLE rv)
- endif(WIN32)
-
+ execute_process(COMMAND ${CMAKE_EXECUTABLE_NAME} -E tar -xvf ${CTEST_DASHBOARD_ROOT}/${CTEST_USE_TAR_SOURCE}.tar RESULT_VARIABLE rv)
+ endif()
+
if(NOT rv EQUAL 0)
message(STATUS "extracting... [error-(${rv}) clean up]")
file(REMOVE_RECURSE "${CTEST_SOURCE_DIRECTORY}")
message(FATAL_ERROR "error: extract of ${CTEST_SOURCE_NAME} failed")
- endif(NOT rv EQUAL 0)
+ endif()
endif(CTEST_USE_TAR_SOURCE)
-
+
#-----------------------------------------------------------------------------
## Clear the build directory
## --------------------------
set(CTEST_START_WITH_EMPTY_BINARY_DIRECTORY TRUE)
if (EXISTS "${CTEST_BINARY_DIRECTORY}" AND IS_DIRECTORY "${CTEST_BINARY_DIRECTORY}")
ctest_empty_binary_directory(${CTEST_BINARY_DIRECTORY})
-else (EXISTS "${CTEST_BINARY_DIRECTORY}" AND IS_DIRECTORY "${CTEST_BINARY_DIRECTORY}")
+else ()
file(MAKE_DIRECTORY "${CTEST_BINARY_DIRECTORY}")
-endif (EXISTS "${CTEST_BINARY_DIRECTORY}" AND IS_DIRECTORY "${CTEST_BINARY_DIRECTORY}")
+endif ()
# Use multiple CPU cores to build
include(ProcessorCount)
@@ -97,28 +150,38 @@ ProcessorCount(N)
if(NOT N EQUAL 0)
if(NOT WIN32)
set(CTEST_BUILD_FLAGS -j${N})
- endif(NOT WIN32)
+ endif()
set(ctest_test_args ${ctest_test_args} PARALLEL_LEVEL ${N})
endif()
set (CTEST_CONFIGURE_COMMAND
"${CTEST_CMAKE_COMMAND} -C \"${CTEST_SOURCE_DIRECTORY}/config/cmake/cacheinit.cmake\" -DCMAKE_BUILD_TYPE:STRING=${CTEST_BUILD_CONFIGURATION} ${BUILD_OPTIONS} \"-G${CTEST_CMAKE_GENERATOR}\" \"${CTEST_SOURCE_DIRECTORY}\""
)
-
+
#-----------------------------------------------------------------------------
## -- set output to english
set($ENV{LC_MESSAGES} "en_EN")
-
+
#-----------------------------------------------------------------------------
- ## NORMAL process
- ## --------------------------
- CTEST_START (Experimental)
- CTEST_CONFIGURE (BUILD "${CTEST_BINARY_DIRECTORY}")
- CTEST_READ_CUSTOM_FILES ("${CTEST_BINARY_DIRECTORY}")
- CTEST_BUILD (BUILD "${CTEST_BINARY_DIRECTORY}" APPEND)
- CTEST_TEST (BUILD "${CTEST_BINARY_DIRECTORY}" APPEND ${ctest_test_args} RETURN_VALUE res)
- if(res GREATER 0)
- message (FATAL_ERROR "tests FAILED")
- endif(res GREATER 0)
+configure_file(${CTEST_SOURCE_DIRECTORY}/config/cmake/CTestCustom.cmake ${CTEST_BINARY_DIRECTORY}/CTestCustom.cmake)
+ctest_read_custom_files ("${CTEST_BINARY_DIRECTORY}")
+## NORMAL process
+## --------------------------
+ctest_start (Experimental)
+ctest_configure (BUILD "${CTEST_BINARY_DIRECTORY}")
+if(LOCAL_SUBMIT)
+ ctest_submit (PARTS Configure Notes)
+endif()
+ctest_build (BUILD "${CTEST_BINARY_DIRECTORY}" APPEND)
+if(LOCAL_SUBMIT)
+ ctest_submit (PARTS Build)
+endif()
+ctest_test (BUILD "${CTEST_BINARY_DIRECTORY}" APPEND ${ctest_test_args} RETURN_VALUE res)
+if(LOCAL_SUBMIT)
+ ctest_submit (PARTS Test)
+endif()
+if(res GREATER 0)
+ message (FATAL_ERROR "tests FAILED")
+endif()
#-----------------------------------------------------------------------------
-##############################################################################################################
+##############################################################################################################
message(STATUS "DONE")
\ No newline at end of file
diff --git a/config/cmake/README.txt.cmake.in b/config/cmake/README.txt.cmake.in
index 5af0de5..bfb3f9e 100644
--- a/config/cmake/README.txt.cmake.in
+++ b/config/cmake/README.txt.cmake.in
@@ -26,22 +26,31 @@ Installation
After Installation
===========================================================================
-The compressed examples file HDF4Examples-0.1.1-Source. at BINARY_EXAMPLE_ENDING@, located in the
+The examples folder, HDF4Examples, located in the
HDF4 install folder, can be built and tested with CMake and the supplied
HDF4_Examples.cmake file. The HDF4_Examples.cmake expects HDF4 to have
-been installed in the default location with above compilers. Also, CMake
-and unzip utilities should be installed.
+been installed in the default location with the above compilers. Also, the CMake
+utility should be installed.
To test the installation with the examples;
Create a directory to run the examples.
- Copy HDF4Examples-0.1.1-Source. at BINARY_EXAMPLE_ENDING@ to this directory, do NOT unzip.
+ Copy HDF4Examples folder to this directory.
Copy HDF4_Examples.cmake to this directory.
- Edit HDF4_Examples.cmake line 8 to set INSTALLDIR to where HDF4 is installed.
- NOTE for Windows: comment line 16 with a '#' character if you do not have a command line
- unzip program. Use Windows Explorer to unzip the HDF4Examples-0.1.1-Source. at BINARY_EXAMPLE_ENDING@ file.
- Edit line 9 for CMake 2.x by removing the 4 digit VS version from the Generator value.
- Execute from this directory:
- ctest -S HDF4_Examples.cmake,HDF4Examples-0.1.1-Source -C Release -O test.log -VV
+ The default source folder is defined as "HDF4Examples". It can be changed
+ with the CTEST_SOURCE_NAME script option.
+ The default installation folder is defined as "@CMAKE_INSTALL_PREFIX@".
+ It can be changed with the INSTALLDIR script option.
+ The default ctest configuration is defined as "Release". It can be changed
+ with the CTEST_BUILD_CONFIGURATION script option. Note that this must
+ be the same as the value used with the -C command line option.
+ The default build configuration is defined to build and use static libraries.
+ Shared libraries can be used with the STATICLIBRARIES script option set to "NO".
+ Other options can be changed by editing the HDF4_Examples.cmake file.
+
+ If the defaults are okay, execute from this directory:
+ ctest -S HDF4_Examples.cmake -C Release -V -O test.log
+ If the defaults need to be changed, execute from this directory:
+ ctest -S HDF4_Examples.cmake,CTEST_SOURCE_NAME=MyExamples,INSTALLDIR=MyLocation -C Release -V -O test.log
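+ For example, to exercise the shared-library configuration in a Debug build
+ (the option values shown here are illustrative, not defaults):
+ ctest -S HDF4_Examples.cmake,STATICLIBRARIES=NO,CTEST_BUILD_CONFIGURATION=Debug -C Debug -V -O test.log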
When executed, the ctest script will save the results to the log file, test.log, as
indicated by the ctest command. If you wish to see more build and test information,
diff --git a/config/cmake/UseJava.cmake b/config/cmake/UseJava.cmake
new file mode 100644
index 0000000..9b2b1e0
--- /dev/null
+++ b/config/cmake/UseJava.cmake
@@ -0,0 +1,1350 @@
+#.rst:
+# UseJava
+# -------
+#
+# Use Module for Java
+#
+# This file provides functions for Java. It is assumed that
+# FindJava.cmake has already been loaded. See FindJava.cmake for
+# information on how to load Java into your CMake project.
+#
+# ::
+#
+# add_jar(target_name
+# [SOURCES] source1 [source2 ...] [resource1 ...]
+# [INCLUDE_JARS jar1 [jar2 ...]]
+# [ENTRY_POINT entry]
+# [VERSION version]
+# [OUTPUT_NAME name]
+# [OUTPUT_DIR dir]
+# )
+#
+# This command creates a <target_name>.jar. It compiles the given
+# source files (source) and adds the given resource files (resource) to
+# the jar file. Source files can be java files or listing files
+# (prefixed by '@'). If only resource files are given then just a jar file
+# is created. The list of include jars are added to the classpath when
+# compiling the java sources and also to the dependencies of the target.
+# INCLUDE_JARS also accepts other target names created by add_jar. For
+# backwards compatibility, jar files listed as sources are ignored (as
+# they have been since the first version of this module).
+#
+# The default OUTPUT_DIR can also be changed by setting the variable
+# CMAKE_JAVA_TARGET_OUTPUT_DIR.
+#
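+# ::
+#
+# For instance, other jars or add_jar targets can be placed on the compile
+# classpath via INCLUDE_JARS (the names below are placeholders, not part of
+# this module):
+#
+# add_jar(app App.java INCLUDE_JARS utils.jar otherJarTarget)
+#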
+# Additional instructions:
+#
+# ::
+#
+# To add compile flags to the target you can set these flags with
+# the following variable:
+#
+#
+#
+# ::
+#
+# set(CMAKE_JAVA_COMPILE_FLAGS -nowarn)
+#
+#
+#
+# ::
+#
+# To add a path or a jar file to the class path you can do this
+# with the CMAKE_JAVA_INCLUDE_PATH variable.
+#
+#
+#
+# ::
+#
+# set(CMAKE_JAVA_INCLUDE_PATH /usr/share/java/shibboleet.jar)
+#
+#
+#
+# ::
+#
+# To use a different output name for the target you can set it with:
+#
+#
+#
+# ::
+#
+# add_jar(foobar foobar.java OUTPUT_NAME shibboleet.jar)
+#
+#
+#
+# ::
+#
+# To use a different output directory than CMAKE_CURRENT_BINARY_DIR
+# you can set it with:
+#
+#
+#
+# ::
+#
+# add_jar(foobar foobar.java OUTPUT_DIR ${PROJECT_BINARY_DIR}/bin)
+#
+#
+#
+# ::
+#
+# To define an entry point in your jar you can set it with the ENTRY_POINT
+# named argument:
+#
+#
+#
+# ::
+#
+# add_jar(example ENTRY_POINT com/examples/MyProject/Main)
+#
+#
+#
+# ::
+#
+# To define a custom manifest for the jar, you can set it with the manifest
+# named argument:
+#
+#
+#
+# ::
+#
+# add_jar(example MANIFEST /path/to/manifest)
+#
+#
+#
+# ::
+#
+# To add a VERSION to the target output name you can set it using
+# the VERSION named argument to add_jar. This will create a jar file with the
+# name shibboleet-1.2.0.jar and will create a symlink shibboleet.jar
+# pointing to the jar with the version information.
+#
+#
+#
+# ::
+#
+# add_jar(shibboleet shibboleet.java VERSION 1.2.0)
+#
+#
+#
+# ::
+#
+# If the target is a JNI library, utilize the following commands to
+# create a JNI symbolic link:
+#
+#
+#
+# ::
+#
+# set(CMAKE_JNI_TARGET TRUE)
+# add_jar(shibboleet shibboleet.java VERSION 1.2.0)
+# install_jar(shibboleet ${LIB_INSTALL_DIR}/shibboleet)
+# install_jni_symlink(shibboleet ${JAVA_LIB_INSTALL_DIR})
+#
+#
+#
+# ::
+#
+# If a single target needs to produce more than one jar from its
+# java source code, to prevent the accumulation of duplicate class
+# files in subsequent jars, set/reset CMAKE_JAR_CLASSES_PREFIX prior
+# to calling the add_jar() function:
+#
+#
+#
+# ::
+#
+# set(CMAKE_JAR_CLASSES_PREFIX com/redhat/foo)
+# add_jar(foo foo.java)
+#
+#
+#
+# ::
+#
+# set(CMAKE_JAR_CLASSES_PREFIX com/redhat/bar)
+# add_jar(bar bar.java)
+#
+#
+#
+# Target Properties:
+#
+# ::
+#
+# The add_jar() function sets some target properties. You can get these
+# properties with the
+# get_property(TARGET <target_name> PROPERTY <property_name>)
+# command.
+#
+#
+#
+# ::
+#
+# INSTALL_FILES The files which should be installed. This is used by
+# install_jar().
+# JNI_SYMLINK The JNI symlink which should be installed.
+# This is used by install_jni_symlink().
+# JAR_FILE The location of the jar file so that you can include
+# it.
+# CLASSDIR The directory where the class files can be found. For
+# example to use them with javah.
+#
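+# For example, the location of a jar built above could be queried with
+# (the variable name is arbitrary):
+#
+# ::
+#
+# get_property(_jarpath TARGET shibboleet PROPERTY JAR_FILE)
+#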
+# ::
+#
+# find_jar(<VAR>
+# name | NAMES name1 [name2 ...]
+# [PATHS path1 [path2 ... ENV var]]
+# [VERSIONS version1 [version2]]
+# [DOC "cache documentation string"]
+# )
+#
+# This command is used to find a full path to the named jar. A cache
+# entry named by <VAR> is created to store the result of this command.
+# If the full path to a jar is found the result is stored in the
+# variable and the search will not be repeated unless the variable is
+# cleared. If nothing is found, the result will be <VAR>-NOTFOUND, and
+# the search will be attempted again the next time find_jar is invoked with
+# the same variable. The name of the full path to a file that is
+# searched for is specified by the names listed after the NAMES argument.
+# Additional search locations can be specified after the PATHS argument.
+# If you require a specific version of a jar file you can specify it with
+# the VERSIONS argument. The argument after DOC will be used for the
+# documentation string in the cache.
+#
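+# A minimal illustration (the jar name, version and path are hypothetical):
+#
+# ::
+#
+# find_jar(SLF4J_API_JAR
+# NAMES slf4j-api
+# VERSIONS 1.7.5
+# PATHS /usr/local/share/java
+# DOC "Path to the slf4j-api jar"
+# )
+#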
+# ::
+#
+# install_jar(target_name destination)
+# install_jar(target_name DESTINATION destination [COMPONENT component])
+#
+# This command installs the TARGET_NAME files to the given DESTINATION.
+# It should be called in the same scope as add_jar() or it will fail.
+#
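+# A typical call, matching the DESTINATION form above (the destination path
+# is only an example):
+#
+# ::
+#
+# add_jar(shibboleet shibboleet.java)
+# install_jar(shibboleet DESTINATION share/java COMPONENT java)
+#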
+# ::
+#
+# install_jni_symlink(target_name destination)
+# install_jni_symlink(target_name DESTINATION destination [COMPONENT component])
+#
+# This command installs the TARGET_NAME JNI symlinks to the given
+# DESTINATION. It should be called in the same scope as add_jar() or it
+# will fail.
+#
+# ::
+#
+# create_javadoc(<VAR>
+# PACKAGES pkg1 [pkg2 ...]
+# [SOURCEPATH <sourcepath>]
+# [CLASSPATH <classpath>]
+# [INSTALLPATH <install path>]
+# [DOCTITLE "the documentation title"]
+# [WINDOWTITLE "the title of the document"]
+# [AUTHOR TRUE|FALSE]
+# [USE TRUE|FALSE]
+# [VERSION TRUE|FALSE]
+# )
+#
+# Create java documentation based on files or packages. For more
+# details please read the javadoc manpage.
+#
+# There are two main signatures for create_javadoc. The first signature
+# works with package names on a path with source files:
+#
+# ::
+#
+# Example:
+# create_javadoc(my_example_doc
+# PACKAGES com.example.foo com.example.bar
+# SOURCEPATH "${CMAKE_CURRENT_SOURCE_DIR}"
+# CLASSPATH ${CMAKE_JAVA_INCLUDE_PATH}
+# WINDOWTITLE "My example"
+# DOCTITLE "<h1>My example</h1>"
+# AUTHOR TRUE
+# USE TRUE
+# VERSION TRUE
+# )
+#
+#
+#
+# The second signature for create_javadoc works on a given list of
+# files.
+#
+# ::
+#
+# create_javadoc(<VAR>
+# FILES file1 [file2 ...]
+# [CLASSPATH <classpath>]
+# [INSTALLPATH <install path>]
+# [DOCTITLE "the documentation title"]
+# [WINDOWTITLE "the title of the document"]
+# [AUTHOR TRUE|FALSE]
+# [USE TRUE|FALSE]
+# [VERSION TRUE|FALSE]
+# )
+#
+#
+#
+# Example:
+#
+# ::
+#
+# create_javadoc(my_example_doc
+# FILES ${example_SRCS}
+# CLASSPATH ${CMAKE_JAVA_INCLUDE_PATH}
+# WINDOWTITLE "My example"
+# DOCTITLE "<h1>My example</h1>"
+# AUTHOR TRUE
+# USE TRUE
+# VERSION TRUE
+# )
+#
+#
+#
+# Both signatures share most of the options. These options are the same
+# as what you can find in the javadoc manpage. Please look at the
+# manpage for CLASSPATH, DOCTITLE, WINDOWTITLE, AUTHOR, USE and VERSION.
+#
+# The documentation will be by default installed to
+#
+# ::
+#
+# ${CMAKE_INSTALL_PREFIX}/share/javadoc/<VAR>
+#
+#
+#
+# if you don't set the INSTALLPATH.
+#
+# ::
+#
+# create_javah(TARGET <target>
+# GENERATED_FILES <VAR>
+# CLASSES <class>...
+# [CLASSPATH <classpath>...]
+# [DEPENDS <depend>...]
+# [OUTPUT_NAME <path>|OUTPUT_DIR <path>]
+# )
+#
+# Create C header files from java classes. These files provide the connective glue
+# that allows your Java and C code to interact.
+#
+# There are two main signatures for create_javah. The first signature
+# returns the generated files through the variable specified by the GENERATED_FILES option:
+#
+# ::
+#
+# Example:
+# Create_javah(GENERATED_FILES files_headers
+# CLASSES org.cmake.HelloWorld
+# CLASSPATH hello.jar
+# )
+#
+#
+#
+# The second signature for create_javah creates a target which encapsulates
+# the header file generation.
+#
+# ::
+#
+# Example:
+# Create_javah(TARGET target_headers
+# CLASSES org.cmake.HelloWorld
+# CLASSPATH hello.jar
+# )
+#
+#
+#
+# Both signatures share the same options.
+#
+# ``CLASSES <class>...``
+# Specifies Java classes used to generate headers.
+#
+# ``CLASSPATH <classpath>...``
+# Specifies various paths to look up classes. Here .class files, jar files or targets
+# created by command add_jar can be used.
+#
+# ``DEPENDS <depend>...``
+# Targets on which the javah target depends
+#
+# ``OUTPUT_NAME <path>``
+# Concatenates the resulting header files for all the classes listed by option CLASSES
+# into <path>. Same behavior as the '-o' option of the javah tool.
+#
+# ``OUTPUT_DIR <path>``
+# Sets the directory where the header files will be generated. Same behavior as the
+# '-d' option of the javah tool. If not specified, ${CMAKE_CURRENT_BINARY_DIR} is used as the output directory.
+
+#=============================================================================
+# Copyright 2013 OpenGamma Ltd. <graham at opengamma.com>
+# Copyright 2010-2011 Andreas schneider <asn at redhat.com>
+# Copyright 2010-2013 Kitware, Inc.
+#
+# Distributed under the OSI-approved BSD License (the "License");
+# see accompanying file Copyright.txt for details.
+#
+# This software is distributed WITHOUT ANY WARRANTY; without even the
+# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the License for more information.
+#=============================================================================
+# (To distribute this file outside of CMake, substitute the full
+# License text for the above reference.)
+
+include(CMakeParseArguments)
+
+function (__java_copy_file src dest comment)
+ add_custom_command(
+ OUTPUT ${dest}
+ COMMAND cmake -E copy_if_different
+ ARGS ${src}
+ ${dest}
+ DEPENDS ${src}
+ COMMENT ${comment})
+endfunction ()
+
+# define helper scripts
+set(_JAVA_CLASS_FILELIST_SCRIPT ${CMAKE_CURRENT_LIST_DIR}/UseJavaClassFilelist.cmake)
+set(_JAVA_SYMLINK_SCRIPT ${CMAKE_CURRENT_LIST_DIR}/UseJavaSymlinks.cmake)
+
+function(add_jar _TARGET_NAME)
+
+ cmake_parse_arguments(_add_jar
+ ""
+ "VERSION;OUTPUT_DIR;OUTPUT_NAME;ENTRY_POINT;MANIFEST"
+ "SOURCES;INCLUDE_JARS"
+ ${ARGN}
+ )
+
+ # In CMake < 2.8.12, add_jar used variables which were set prior to calling
+ # add_jar for customizing the behavior of add_jar. In order to be backwards
+ # compatible, check if any of those variables are set, and use them to
+ # initialize values of the named arguments. (Giving the corresponding named
+ # argument will override the value set here.)
+ #
+ # New features should use named arguments only.
+ if(NOT DEFINED _add_jar_VERSION AND DEFINED CMAKE_JAVA_TARGET_VERSION)
+ set(_add_jar_VERSION "${CMAKE_JAVA_TARGET_VERSION}")
+ endif()
+ if(NOT DEFINED _add_jar_OUTPUT_DIR AND DEFINED CMAKE_JAVA_TARGET_OUTPUT_DIR)
+ set(_add_jar_OUTPUT_DIR "${CMAKE_JAVA_TARGET_OUTPUT_DIR}")
+ endif()
+ if(NOT DEFINED _add_jar_OUTPUT_NAME AND DEFINED CMAKE_JAVA_TARGET_OUTPUT_NAME)
+ set(_add_jar_OUTPUT_NAME "${CMAKE_JAVA_TARGET_OUTPUT_NAME}")
+ # reset
+ set(CMAKE_JAVA_TARGET_OUTPUT_NAME)
+ endif()
+ if(NOT DEFINED _add_jar_ENTRY_POINT AND DEFINED CMAKE_JAVA_JAR_ENTRY_POINT)
+ set(_add_jar_ENTRY_POINT "${CMAKE_JAVA_JAR_ENTRY_POINT}")
+ endif()
+
+ set(_JAVA_SOURCE_FILES ${_add_jar_SOURCES} ${_add_jar_UNPARSED_ARGUMENTS})
+
+ if (NOT DEFINED _add_jar_OUTPUT_DIR)
+ set(_add_jar_OUTPUT_DIR ${CMAKE_CURRENT_BINARY_DIR})
+ endif()
+
+ if (_add_jar_ENTRY_POINT)
+ set(_ENTRY_POINT_OPTION e)
+ set(_ENTRY_POINT_VALUE ${_add_jar_ENTRY_POINT})
+ endif ()
+
+ if (_add_jar_MANIFEST)
+ set(_MANIFEST_OPTION m)
+ get_filename_component (_MANIFEST_VALUE "${_add_jar_MANIFEST}" ABSOLUTE)
+ endif ()
+
+ if (LIBRARY_OUTPUT_PATH)
+ set(CMAKE_JAVA_LIBRARY_OUTPUT_PATH ${LIBRARY_OUTPUT_PATH})
+ else ()
+ set(CMAKE_JAVA_LIBRARY_OUTPUT_PATH ${_add_jar_OUTPUT_DIR})
+ endif ()
+
+ set(CMAKE_JAVA_INCLUDE_PATH
+ ${CMAKE_JAVA_INCLUDE_PATH}
+ ${CMAKE_CURRENT_SOURCE_DIR}
+ ${CMAKE_JAVA_OBJECT_OUTPUT_PATH}
+ ${CMAKE_JAVA_LIBRARY_OUTPUT_PATH}
+ )
+
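+ # The Java class path separator is ';' on native Windows hosts and ':' everywhere else.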
+ if (CMAKE_HOST_WIN32 AND NOT CYGWIN AND CMAKE_HOST_SYSTEM_NAME MATCHES "Windows")
+ set(CMAKE_JAVA_INCLUDE_FLAG_SEP ";")
+ else ()
+ set(CMAKE_JAVA_INCLUDE_FLAG_SEP ":")
+ endif()
+
+ foreach (JAVA_INCLUDE_DIR ${CMAKE_JAVA_INCLUDE_PATH})
+ set(CMAKE_JAVA_INCLUDE_PATH_FINAL "${CMAKE_JAVA_INCLUDE_PATH_FINAL}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${JAVA_INCLUDE_DIR}")
+ endforeach()
+
+ set(CMAKE_JAVA_CLASS_OUTPUT_PATH "${_add_jar_OUTPUT_DIR}${CMAKE_FILES_DIRECTORY}/${_TARGET_NAME}.dir")
+
+ set(_JAVA_TARGET_OUTPUT_NAME "${_TARGET_NAME}.jar")
+ if (_add_jar_OUTPUT_NAME AND _add_jar_VERSION)
+ set(_JAVA_TARGET_OUTPUT_NAME "${_add_jar_OUTPUT_NAME}-${_add_jar_VERSION}.jar")
+ set(_JAVA_TARGET_OUTPUT_LINK "${_add_jar_OUTPUT_NAME}.jar")
+ elseif (_add_jar_VERSION)
+ set(_JAVA_TARGET_OUTPUT_NAME "${_TARGET_NAME}-${_add_jar_VERSION}.jar")
+ set(_JAVA_TARGET_OUTPUT_LINK "${_TARGET_NAME}.jar")
+ elseif (_add_jar_OUTPUT_NAME)
+ set(_JAVA_TARGET_OUTPUT_NAME "${_add_jar_OUTPUT_NAME}.jar")
+ endif ()
+
+ set(_JAVA_CLASS_FILES)
+ set(_JAVA_COMPILE_FILES)
+ set(_JAVA_COMPILE_FILELISTS)
+ set(_JAVA_DEPENDS)
+ set(_JAVA_COMPILE_DEPENDS)
+ set(_JAVA_RESOURCE_FILES)
+ set(_JAVA_RESOURCE_FILES_RELATIVE)
+ foreach(_JAVA_SOURCE_FILE ${_JAVA_SOURCE_FILES})
+ get_filename_component(_JAVA_EXT ${_JAVA_SOURCE_FILE} EXT)
+ get_filename_component(_JAVA_FILE ${_JAVA_SOURCE_FILE} NAME_WE)
+ get_filename_component(_JAVA_PATH ${_JAVA_SOURCE_FILE} PATH)
+ get_filename_component(_JAVA_FULL ${_JAVA_SOURCE_FILE} ABSOLUTE)
+
+ if (_JAVA_SOURCE_FILE MATCHES "^@(.+)$")
+ get_filename_component(_JAVA_FULL ${CMAKE_MATCH_1} ABSOLUTE)
+ list(APPEND _JAVA_COMPILE_FILELISTS ${_JAVA_FULL})
+
+ elseif (_JAVA_EXT MATCHES ".java")
+ file(RELATIVE_PATH _JAVA_REL_BINARY_PATH ${_add_jar_OUTPUT_DIR} ${_JAVA_FULL})
+ file(RELATIVE_PATH _JAVA_REL_SOURCE_PATH ${CMAKE_CURRENT_SOURCE_DIR} ${_JAVA_FULL})
+ string(LENGTH ${_JAVA_REL_BINARY_PATH} _BIN_LEN)
+ string(LENGTH ${_JAVA_REL_SOURCE_PATH} _SRC_LEN)
+ if (${_BIN_LEN} LESS ${_SRC_LEN})
+ set(_JAVA_REL_PATH ${_JAVA_REL_BINARY_PATH})
+ else ()
+ set(_JAVA_REL_PATH ${_JAVA_REL_SOURCE_PATH})
+ endif ()
+ get_filename_component(_JAVA_REL_PATH ${_JAVA_REL_PATH} PATH)
+
+ list(APPEND _JAVA_COMPILE_FILES ${_JAVA_SOURCE_FILE})
+ set(_JAVA_CLASS_FILE "${CMAKE_JAVA_CLASS_OUTPUT_PATH}/${_JAVA_REL_PATH}/${_JAVA_FILE}.class")
+ set(_JAVA_CLASS_FILES ${_JAVA_CLASS_FILES} ${_JAVA_CLASS_FILE})
+
+ elseif (_JAVA_EXT MATCHES ".jar"
+ OR _JAVA_EXT MATCHES ".war"
+ OR _JAVA_EXT MATCHES ".ear"
+ OR _JAVA_EXT MATCHES ".sar")
+ # Ignored for backward compatibility
+
+ elseif (_JAVA_EXT STREQUAL "")
+ list(APPEND CMAKE_JAVA_INCLUDE_PATH ${JAVA_JAR_TARGET_${_JAVA_SOURCE_FILE}} ${JAVA_JAR_TARGET_${_JAVA_SOURCE_FILE}_CLASSPATH})
+ list(APPEND _JAVA_DEPENDS ${JAVA_JAR_TARGET_${_JAVA_SOURCE_FILE}})
+
+ else ()
+ __java_copy_file(${CMAKE_CURRENT_SOURCE_DIR}/${_JAVA_SOURCE_FILE}
+ ${CMAKE_JAVA_CLASS_OUTPUT_PATH}/${_JAVA_SOURCE_FILE}
+ "Copying ${_JAVA_SOURCE_FILE} to the build directory")
+ list(APPEND _JAVA_RESOURCE_FILES ${CMAKE_JAVA_CLASS_OUTPUT_PATH}/${_JAVA_SOURCE_FILE})
+ list(APPEND _JAVA_RESOURCE_FILES_RELATIVE ${_JAVA_SOURCE_FILE})
+ endif ()
+ endforeach()
+
+ foreach(_JAVA_INCLUDE_JAR ${_add_jar_INCLUDE_JARS})
+ if (TARGET ${_JAVA_INCLUDE_JAR})
+ get_target_property(_JAVA_JAR_PATH ${_JAVA_INCLUDE_JAR} JAR_FILE)
+ if (_JAVA_JAR_PATH)
+ set(CMAKE_JAVA_INCLUDE_PATH_FINAL "${CMAKE_JAVA_INCLUDE_PATH_FINAL}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${_JAVA_JAR_PATH}")
+ list(APPEND CMAKE_JAVA_INCLUDE_PATH ${_JAVA_JAR_PATH})
+ list(APPEND _JAVA_DEPENDS ${_JAVA_INCLUDE_JAR})
+ list(APPEND _JAVA_COMPILE_DEPENDS ${_JAVA_INCLUDE_JAR})
+ else ()
+ message(SEND_ERROR "add_jar: INCLUDE_JARS target ${_JAVA_INCLUDE_JAR} is not a jar")
+ endif ()
+ else ()
+ set(CMAKE_JAVA_INCLUDE_PATH_FINAL "${CMAKE_JAVA_INCLUDE_PATH_FINAL}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${_JAVA_INCLUDE_JAR}")
+ list(APPEND CMAKE_JAVA_INCLUDE_PATH "${_JAVA_INCLUDE_JAR}")
+ list(APPEND _JAVA_DEPENDS "${_JAVA_INCLUDE_JAR}")
+ list(APPEND _JAVA_COMPILE_DEPENDS "${_JAVA_INCLUDE_JAR}")
+ endif ()
+ endforeach()
+
+ # create an empty java_class_filelist
+ if (NOT EXISTS ${CMAKE_JAVA_CLASS_OUTPUT_PATH}/java_class_filelist)
+ file(WRITE ${CMAKE_JAVA_CLASS_OUTPUT_PATH}/java_class_filelist "")
+ endif()
+
+ if (_JAVA_COMPILE_FILES OR _JAVA_COMPILE_FILELISTS)
+ set (_JAVA_SOURCES_FILELISTS)
+
+ if (_JAVA_COMPILE_FILES)
+ # Create the list of files to compile.
+ set(_JAVA_SOURCES_FILE ${CMAKE_JAVA_CLASS_OUTPUT_PATH}/java_sources)
+ string(REPLACE ";" "\"\n\"" _JAVA_COMPILE_STRING "\"${_JAVA_COMPILE_FILES}\"")
+ file(WRITE ${_JAVA_SOURCES_FILE} ${_JAVA_COMPILE_STRING})
+ list (APPEND _JAVA_SOURCES_FILELISTS "@${_JAVA_SOURCES_FILE}")
+ endif()
+ if (_JAVA_COMPILE_FILELISTS)
+ foreach (_JAVA_FILELIST IN LISTS _JAVA_COMPILE_FILELISTS)
+ list (APPEND _JAVA_SOURCES_FILELISTS "@${_JAVA_FILELIST}")
+ endforeach()
+ endif()
+
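+ # javac reads the '@'-prefixed argument files built above (standard javac @argfile syntax).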
+ # Compile the java files and create a list of class files
+ add_custom_command(
+ # NOTE: this command generates an artificial dependency file
+ OUTPUT ${CMAKE_JAVA_CLASS_OUTPUT_PATH}/java_compiled_${_TARGET_NAME}
+ COMMAND ${Java_JAVAC_EXECUTABLE}
+ ${CMAKE_JAVA_COMPILE_FLAGS}
+ -classpath "${CMAKE_JAVA_INCLUDE_PATH_FINAL}"
+ -d ${CMAKE_JAVA_CLASS_OUTPUT_PATH}
+ ${_JAVA_SOURCES_FILELISTS}
+ COMMAND ${CMAKE_COMMAND} -E touch ${CMAKE_JAVA_CLASS_OUTPUT_PATH}/java_compiled_${_TARGET_NAME}
+ DEPENDS ${_JAVA_COMPILE_FILES} ${_JAVA_COMPILE_FILELISTS} ${_JAVA_COMPILE_DEPENDS}
+ WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
+ COMMENT "Building Java objects for ${_TARGET_NAME}.jar"
+ )
+ add_custom_command(
+ OUTPUT ${CMAKE_JAVA_CLASS_OUTPUT_PATH}/java_class_filelist
+ COMMAND ${CMAKE_COMMAND}
+ -DCMAKE_JAVA_CLASS_OUTPUT_PATH=${CMAKE_JAVA_CLASS_OUTPUT_PATH}
+ -DCMAKE_JAR_CLASSES_PREFIX="${CMAKE_JAR_CLASSES_PREFIX}"
+ -P ${_JAVA_CLASS_FILELIST_SCRIPT}
+ DEPENDS ${CMAKE_JAVA_CLASS_OUTPUT_PATH}/java_compiled_${_TARGET_NAME}
+ WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
+ )
+ endif ()
+
+ # create the jar file
+ set(_JAVA_JAR_OUTPUT_PATH
+ ${_add_jar_OUTPUT_DIR}/${_JAVA_TARGET_OUTPUT_NAME})
+ if (CMAKE_JNI_TARGET)
+ add_custom_command(
+ OUTPUT ${_JAVA_JAR_OUTPUT_PATH}
+ COMMAND ${Java_JAR_EXECUTABLE}
+ -cf${_ENTRY_POINT_OPTION}${_MANIFEST_OPTION} ${_JAVA_JAR_OUTPUT_PATH} ${_ENTRY_POINT_VALUE} ${_MANIFEST_VALUE}
+ ${_JAVA_RESOURCE_FILES_RELATIVE} @java_class_filelist
+ COMMAND ${CMAKE_COMMAND}
+ -D_JAVA_TARGET_DIR=${_add_jar_OUTPUT_DIR}
+ -D_JAVA_TARGET_OUTPUT_NAME=${_JAVA_TARGET_OUTPUT_NAME}
+ -D_JAVA_TARGET_OUTPUT_LINK=${_JAVA_TARGET_OUTPUT_LINK}
+ -P ${_JAVA_SYMLINK_SCRIPT}
+ COMMAND ${CMAKE_COMMAND}
+ -D_JAVA_TARGET_DIR=${_add_jar_OUTPUT_DIR}
+ -D_JAVA_TARGET_OUTPUT_NAME=${_JAVA_JAR_OUTPUT_PATH}
+ -D_JAVA_TARGET_OUTPUT_LINK=${_JAVA_TARGET_OUTPUT_LINK}
+ -P ${_JAVA_SYMLINK_SCRIPT}
+ DEPENDS ${_JAVA_RESOURCE_FILES} ${_JAVA_DEPENDS} ${CMAKE_JAVA_CLASS_OUTPUT_PATH}/java_class_filelist
+ WORKING_DIRECTORY ${CMAKE_JAVA_CLASS_OUTPUT_PATH}
+ COMMENT "Creating Java archive ${_JAVA_TARGET_OUTPUT_NAME}"
+ )
+ else ()
+ add_custom_command(
+ OUTPUT ${_JAVA_JAR_OUTPUT_PATH}
+ COMMAND ${Java_JAR_EXECUTABLE}
+ -cf${_ENTRY_POINT_OPTION}${_MANIFEST_OPTION} ${_JAVA_JAR_OUTPUT_PATH} ${_ENTRY_POINT_VALUE} ${_MANIFEST_VALUE}
+ ${_JAVA_RESOURCE_FILES_RELATIVE} @java_class_filelist
+ COMMAND ${CMAKE_COMMAND}
+ -D_JAVA_TARGET_DIR=${_add_jar_OUTPUT_DIR}
+ -D_JAVA_TARGET_OUTPUT_NAME=${_JAVA_TARGET_OUTPUT_NAME}
+ -D_JAVA_TARGET_OUTPUT_LINK=${_JAVA_TARGET_OUTPUT_LINK}
+ -P ${_JAVA_SYMLINK_SCRIPT}
+ WORKING_DIRECTORY ${CMAKE_JAVA_CLASS_OUTPUT_PATH}
+ DEPENDS ${_JAVA_RESOURCE_FILES} ${_JAVA_DEPENDS} ${CMAKE_JAVA_CLASS_OUTPUT_PATH}/java_class_filelist
+ COMMENT "Creating Java archive ${_JAVA_TARGET_OUTPUT_NAME}"
+ )
+ endif ()
+
+ # Add the target and make sure we have the latest resource files.
+ add_custom_target(${_TARGET_NAME} ALL DEPENDS ${_JAVA_JAR_OUTPUT_PATH})
+
+ set_property(
+ TARGET
+ ${_TARGET_NAME}
+ PROPERTY
+ INSTALL_FILES
+ ${_JAVA_JAR_OUTPUT_PATH}
+ )
+
+ if (_JAVA_TARGET_OUTPUT_LINK)
+ set_property(
+ TARGET
+ ${_TARGET_NAME}
+ PROPERTY
+ INSTALL_FILES
+ ${_JAVA_JAR_OUTPUT_PATH}
+ ${_add_jar_OUTPUT_DIR}/${_JAVA_TARGET_OUTPUT_LINK}
+ )
+
+ if (CMAKE_JNI_TARGET)
+ set_property(
+ TARGET
+ ${_TARGET_NAME}
+ PROPERTY
+ JNI_SYMLINK
+ ${_add_jar_OUTPUT_DIR}/${_JAVA_TARGET_OUTPUT_LINK}
+ )
+ endif ()
+ endif ()
+
+ set_property(
+ TARGET
+ ${_TARGET_NAME}
+ PROPERTY
+ JAR_FILE
+ ${_JAVA_JAR_OUTPUT_PATH}
+ )
+
+ set_property(
+ TARGET
+ ${_TARGET_NAME}
+ PROPERTY
+ CLASSDIR
+ ${CMAKE_JAVA_CLASS_OUTPUT_PATH}
+ )
+
+endfunction()
+
+function(INSTALL_JAR _TARGET_NAME)
+ if (ARGC EQUAL 2)
+ set (_DESTINATION ${ARGV1})
+ else()
+ cmake_parse_arguments(_install_jar
+ ""
+ "DESTINATION;COMPONENT"
+ ""
+ ${ARGN})
+ if (_install_jar_DESTINATION)
+ set (_DESTINATION ${_install_jar_DESTINATION})
+ else()
+ message(SEND_ERROR "install_jar: ${_TARGET_NAME}: DESTINATION must be specified.")
+ endif()
+
+ if (_install_jar_COMPONENT)
+ set (_COMPONENT COMPONENT ${_install_jar_COMPONENT})
+ endif()
+ endif()
+
+ get_property(__FILES
+ TARGET
+ ${_TARGET_NAME}
+ PROPERTY
+ INSTALL_FILES
+ )
+
+ if (__FILES)
+ install(
+ FILES
+ ${__FILES}
+ DESTINATION
+ ${_DESTINATION}
+ ${_COMPONENT}
+ )
+ else ()
+ message(SEND_ERROR "install_jar: The target ${_TARGET_NAME} is not known in this scope.")
+ endif ()
+endfunction()
+
+function(INSTALL_JNI_SYMLINK _TARGET_NAME)
+ if (ARGC EQUAL 2)
+ set (_DESTINATION ${ARGV1})
+ else()
+ cmake_parse_arguments(_install_jni_symlink
+ ""
+ "DESTINATION;COMPONENT"
+ ""
+ ${ARGN})
+ if (_install_jni_symlink_DESTINATION)
+ set (_DESTINATION ${_install_jni_symlink_DESTINATION})
+ else()
+ message(SEND_ERROR "install_jni_symlink: ${_TARGET_NAME}: DESTINATION must be specified.")
+ endif()
+
+ if (_install_jni_symlink_COMPONENT)
+ set (_COMPONENT COMPONENT ${_install_jni_symlink_COMPONENT})
+ endif()
+ endif()
+
+ get_property(__SYMLINK
+ TARGET
+ ${_TARGET_NAME}
+ PROPERTY
+ JNI_SYMLINK
+ )
+
+ if (__SYMLINK)
+ install(
+ FILES
+ ${__SYMLINK}
+ DESTINATION
+ ${_DESTINATION}
+ ${_COMPONENT}
+ )
+ else ()
+ message(SEND_ERROR "install_jni_symlink: The target ${_TARGET_NAME} is not known in this scope.")
+ endif ()
+endfunction()
+
+function (find_jar VARIABLE)
+ set(_jar_names)
+ set(_jar_files)
+ set(_jar_versions)
+ set(_jar_paths
+ /usr/share/java/
+ /usr/local/share/java/
+ ${Java_JAR_PATHS})
+ set(_jar_doc "NOTSET")
+
+ set(_state "name")
+
+ foreach (arg ${ARGN})
+ if (${_state} STREQUAL "name")
+ if (${arg} STREQUAL "VERSIONS")
+ set(_state "versions")
+ elseif (${arg} STREQUAL "NAMES")
+ set(_state "names")
+ elseif (${arg} STREQUAL "PATHS")
+ set(_state "paths")
+ elseif (${arg} STREQUAL "DOC")
+ set(_state "doc")
+ else ()
+ set(_jar_names ${arg})
+ if (_jar_doc STREQUAL "NOTSET")
+ set(_jar_doc "Finding ${arg} jar")
+ endif ()
+ endif ()
+ elseif (${_state} STREQUAL "versions")
+ if (${arg} STREQUAL "NAMES")
+ set(_state "names")
+ elseif (${arg} STREQUAL "PATHS")
+ set(_state "paths")
+ elseif (${arg} STREQUAL "DOC")
+ set(_state "doc")
+ else ()
+ set(_jar_versions ${_jar_versions} ${arg})
+ endif ()
+ elseif (${_state} STREQUAL "names")
+ if (${arg} STREQUAL "VERSIONS")
+ set(_state "versions")
+ elseif (${arg} STREQUAL "PATHS")
+ set(_state "paths")
+ elseif (${arg} STREQUAL "DOC")
+ set(_state "doc")
+ else ()
+ set(_jar_names ${_jar_names} ${arg})
+ if (_jar_doc STREQUAL "NOTSET")
+ set(_jar_doc "Finding ${arg} jar")
+ endif ()
+ endif ()
+ elseif (${_state} STREQUAL "paths")
+ if (${arg} STREQUAL "VERSIONS")
+ set(_state "versions")
+ elseif (${arg} STREQUAL "NAMES")
+ set(_state "names")
+ elseif (${arg} STREQUAL "DOC")
+ set(_state "doc")
+ else ()
+ set(_jar_paths ${_jar_paths} ${arg})
+ endif ()
+ elseif (${_state} STREQUAL "doc")
+ if (${arg} STREQUAL "VERSIONS")
+ set(_state "versions")
+ elseif (${arg} STREQUAL "NAMES")
+ set(_state "names")
+ elseif (${arg} STREQUAL "PATHS")
+ set(_state "paths")
+ else ()
+ set(_jar_doc ${arg})
+ endif ()
+ endif ()
+ endforeach ()
+
+ if (NOT _jar_names)
+ message(FATAL_ERROR "find_jar: No name to search for given")
+ endif ()
+
+ foreach (jar_name ${_jar_names})
+ foreach (version ${_jar_versions})
+ set(_jar_files ${_jar_files} ${jar_name}-${version}.jar)
+ endforeach ()
+ set(_jar_files ${_jar_files} ${jar_name}.jar)
+ endforeach ()
+
+ find_file(${VARIABLE}
+ NAMES ${_jar_files}
+ PATHS ${_jar_paths}
+ DOC ${_jar_doc}
+ NO_DEFAULT_PATH)
+endfunction ()
+
+function(create_javadoc _target)
+ set(_javadoc_packages)
+ set(_javadoc_files)
+ set(_javadoc_overview)
+ set(_javadoc_sourcepath)
+ set(_javadoc_classpath)
+ set(_javadoc_installpath "${CMAKE_INSTALL_PREFIX}/share/javadoc")
+ set(_javadoc_doctitle)
+ set(_javadoc_windowtitle)
+ set(_javadoc_author FALSE)
+ set(_javadoc_version FALSE)
+ set(_javadoc_use FALSE)
+
+ set(_state "package")
+
+ foreach (arg ${ARGN})
+ if (${_state} STREQUAL "package")
+ if (${arg} STREQUAL "PACKAGES")
+ set(_state "packages")
+ elseif (${arg} STREQUAL "FILES")
+ set(_state "files")
+ elseif (${arg} STREQUAL "SOURCEPATH")
+ set(_state "sourcepath")
+ elseif (${arg} STREQUAL "OVERVIEW")
+ set(_state "overview")
+ elseif (${arg} STREQUAL "CLASSPATH")
+ set(_state "classpath")
+ elseif (${arg} STREQUAL "INSTALLPATH")
+ set(_state "installpath")
+ elseif (${arg} STREQUAL "DOCTITLE")
+ set(_state "doctitle")
+ elseif (${arg} STREQUAL "WINDOWTITLE")
+ set(_state "windowtitle")
+ elseif (${arg} STREQUAL "AUTHOR")
+ set(_state "author")
+ elseif (${arg} STREQUAL "USE")
+ set(_state "use")
+ elseif (${arg} STREQUAL "VERSION")
+ set(_state "version")
+ else ()
+ set(_javadoc_packages ${arg})
+ set(_state "packages")
+ endif ()
+ elseif (${_state} STREQUAL "packages")
+ if (${arg} STREQUAL "FILES")
+ set(_state "files")
+ elseif (${arg} STREQUAL "SOURCEPATH")
+ set(_state "sourcepath")
+ elseif (${arg} STREQUAL "OVERVIEW")
+ set(_state "overview")
+ elseif (${arg} STREQUAL "CLASSPATH")
+ set(_state "classpath")
+ elseif (${arg} STREQUAL "INSTALLPATH")
+ set(_state "installpath")
+ elseif (${arg} STREQUAL "DOCTITLE")
+ set(_state "doctitle")
+ elseif (${arg} STREQUAL "WINDOWTITLE")
+ set(_state "windowtitle")
+ elseif (${arg} STREQUAL "AUTHOR")
+ set(_state "author")
+ elseif (${arg} STREQUAL "USE")
+ set(_state "use")
+ elseif (${arg} STREQUAL "VERSION")
+ set(_state "version")
+ else ()
+ list(APPEND _javadoc_packages ${arg})
+ endif ()
+ elseif (${_state} STREQUAL "files")
+ if (${arg} STREQUAL "PACKAGES")
+ set(_state "packages")
+ elseif (${arg} STREQUAL "SOURCEPATH")
+ set(_state "sourcepath")
+ elseif (${arg} STREQUAL "OVERVIEW")
+ set(_state "overview")
+ elseif (${arg} STREQUAL "CLASSPATH")
+ set(_state "classpath")
+ elseif (${arg} STREQUAL "INSTALLPATH")
+ set(_state "installpath")
+ elseif (${arg} STREQUAL "DOCTITLE")
+ set(_state "doctitle")
+ elseif (${arg} STREQUAL "WINDOWTITLE")
+ set(_state "windowtitle")
+ elseif (${arg} STREQUAL "AUTHOR")
+ set(_state "author")
+ elseif (${arg} STREQUAL "USE")
+ set(_state "use")
+ elseif (${arg} STREQUAL "VERSION")
+ set(_state "version")
+ else ()
+ list(APPEND _javadoc_files ${arg})
+ endif ()
+ elseif (${_state} STREQUAL "sourcepath")
+ if (${arg} STREQUAL "PACKAGES")
+ set(_state "packages")
+ elseif (${arg} STREQUAL "FILES")
+ set(_state "files")
+ elseif (${arg} STREQUAL "OVERVIEW")
+ set(_state "overview")
+ elseif (${arg} STREQUAL "CLASSPATH")
+ set(_state "classpath")
+ elseif (${arg} STREQUAL "INSTALLPATH")
+ set(_state "installpath")
+ elseif (${arg} STREQUAL "DOCTITLE")
+ set(_state "doctitle")
+ elseif (${arg} STREQUAL "WINDOWTITLE")
+ set(_state "windowtitle")
+ elseif (${arg} STREQUAL "AUTHOR")
+ set(_state "author")
+ elseif (${arg} STREQUAL "USE")
+ set(_state "use")
+ elseif (${arg} STREQUAL "VERSION")
+ set(_state "version")
+ else ()
+ list(APPEND _javadoc_sourcepath ${arg})
+ endif ()
+ elseif (${_state} STREQUAL "classpath")
+ if (${arg} STREQUAL "PACKAGES")
+ set(_state "packages")
+ elseif (${arg} STREQUAL "FILES")
+ set(_state "files")
+ elseif (${arg} STREQUAL "SOURCEPATH")
+ set(_state "sourcepath")
+ elseif (${arg} STREQUAL "OVERVIEW")
+ set(_state "overview")
+ elseif (${arg} STREQUAL "INSTALLPATH")
+ set(_state "installpath")
+ elseif (${arg} STREQUAL "DOCTITLE")
+ set(_state "doctitle")
+ elseif (${arg} STREQUAL "WINDOWTITLE")
+ set(_state "windowtitle")
+ elseif (${arg} STREQUAL "AUTHOR")
+ set(_state "author")
+ elseif (${arg} STREQUAL "USE")
+ set(_state "use")
+ elseif (${arg} STREQUAL "VERSION")
+ set(_state "version")
+ else ()
+ list(APPEND _javadoc_classpath ${arg})
+ endif ()
+ elseif (${_state} STREQUAL "installpath")
+ if (${arg} STREQUAL "PACKAGES")
+ set(_state "packages")
+ elseif (${arg} STREQUAL "FILES")
+ set(_state "files")
+ elseif (${arg} STREQUAL "SOURCEPATH")
+ set(_state "sourcepath")
+ elseif (${arg} STREQUAL "OVERVIEW")
+ set(_state "overview")
+ elseif (${arg} STREQUAL "DOCTITLE")
+ set(_state "doctitle")
+ elseif (${arg} STREQUAL "WINDOWTITLE")
+ set(_state "windowtitle")
+ elseif (${arg} STREQUAL "AUTHOR")
+ set(_state "author")
+ elseif (${arg} STREQUAL "USE")
+ set(_state "use")
+ elseif (${arg} STREQUAL "VERSION")
+ set(_state "version")
+ else ()
+ set(_javadoc_installpath ${arg})
+ endif ()
+ elseif (${_state} STREQUAL "doctitle")
+ if (${arg} STREQUAL "PACKAGES")
+ set(_state "packages")
+ elseif (${arg} STREQUAL "FILES")
+ set(_state "files")
+ elseif (${arg} STREQUAL "SOURCEPATH")
+ set(_state "sourcepath")
+ elseif (${arg} STREQUAL "OVERVIEW")
+ set(_state "overview")
+ elseif (${arg} STREQUAL "INSTALLPATH")
+ set(_state "installpath")
+ elseif (${arg} STREQUAL "CLASSPATH")
+ set(_state "classpath")
+ elseif (${arg} STREQUAL "WINDOWTITLE")
+ set(_state "windowtitle")
+ elseif (${arg} STREQUAL "AUTHOR")
+ set(_state "author")
+ elseif (${arg} STREQUAL "USE")
+ set(_state "use")
+ elseif (${arg} STREQUAL "VERSION")
+ set(_state "version")
+ else ()
+ set(_javadoc_doctitle ${arg})
+ endif ()
+ elseif (${_state} STREQUAL "windowtitle")
+ if (${arg} STREQUAL "PACKAGES")
+ set(_state "packages")
+ elseif (${arg} STREQUAL "FILES")
+ set(_state "files")
+ elseif (${arg} STREQUAL "SOURCEPATH")
+ set(_state "sourcepath")
+ elseif (${arg} STREQUAL "OVERVIEW")
+ set(_state "overview")
+ elseif (${arg} STREQUAL "CLASSPATH")
+ set(_state "classpath")
+ elseif (${arg} STREQUAL "INSTALLPATH")
+ set(_state "installpath")
+ elseif (${arg} STREQUAL "DOCTITLE")
+ set(_state "doctitle")
+ elseif (${arg} STREQUAL "AUTHOR")
+ set(_state "author")
+ elseif (${arg} STREQUAL "USE")
+ set(_state "use")
+ elseif (${arg} STREQUAL "VERSION")
+ set(_state "version")
+ else ()
+ set(_javadoc_windowtitle ${arg})
+ endif ()
+ elseif (${_state} STREQUAL "author")
+ if (${arg} STREQUAL "PACKAGES")
+ set(_state "packages")
+ elseif (${arg} STREQUAL "FILES")
+ set(_state "files")
+ elseif (${arg} STREQUAL "SOURCEPATH")
+ set(_state "sourcepath")
+ elseif (${arg} STREQUAL "OVERVIEW")
+ set(_state "overview")
+ elseif (${arg} STREQUAL "CLASSPATH")
+ set(_state "classpath")
+ elseif (${arg} STREQUAL "INSTALLPATH")
+ set(_state "installpath")
+ elseif (${arg} STREQUAL "DOCTITLE")
+ set(_state "doctitle")
+ elseif (${arg} STREQUAL "WINDOWTITLE")
+ set(_state "windowtitle")
+ elseif (${arg} STREQUAL "AUTHOR")
+ set(_state "author")
+ elseif (${arg} STREQUAL "USE")
+ set(_state "use")
+ elseif (${arg} STREQUAL "VERSION")
+ set(_state "version")
+ else ()
+ set(_javadoc_author ${arg})
+ endif ()
+ elseif (${_state} STREQUAL "use")
+ if (${arg} STREQUAL "PACKAGES")
+ set(_state "packages")
+ elseif (${arg} STREQUAL "FILES")
+ set(_state "files")
+ elseif (${arg} STREQUAL "SOURCEPATH")
+ set(_state "sourcepath")
+ elseif (${arg} STREQUAL "OVERVIEW")
+ set(_state "overview")
+ elseif (${arg} STREQUAL "CLASSPATH")
+ set(_state "classpath")
+ elseif (${arg} STREQUAL "INSTALLPATH")
+ set(_state "installpath")
+ elseif (${arg} STREQUAL "DOCTITLE")
+ set(_state "doctitle")
+ elseif (${arg} STREQUAL "WINDOWTITLE")
+ set(_state "windowtitle")
+ elseif (${arg} STREQUAL "AUTHOR")
+ set(_state "author")
+ elseif (${arg} STREQUAL "USE")
+ set(_state "use")
+ elseif (${arg} STREQUAL "VERSION")
+ set(_state "version")
+ else ()
+ set(_javadoc_use ${arg})
+ endif ()
+ elseif (${_state} STREQUAL "version")
+ if (${arg} STREQUAL "PACKAGES")
+ set(_state "packages")
+ elseif (${arg} STREQUAL "FILES")
+ set(_state "files")
+ elseif (${arg} STREQUAL "SOURCEPATH")
+ set(_state "sourcepath")
+ elseif (${arg} STREQUAL "OVERVIEW")
+ set(_state "overview")
+ elseif (${arg} STREQUAL "CLASSPATH")
+ set(_state "classpath")
+ elseif (${arg} STREQUAL "INSTALLPATH")
+ set(_state "installpath")
+ elseif (${arg} STREQUAL "DOCTITLE")
+ set(_state "doctitle")
+ elseif (${arg} STREQUAL "WINDOWTITLE")
+ set(_state "windowtitle")
+ elseif (${arg} STREQUAL "AUTHOR")
+ set(_state "author")
+ elseif (${arg} STREQUAL "USE")
+ set(_state "use")
+ elseif (${arg} STREQUAL "VERSION")
+ set(_state "version")
+ else ()
+ set(_javadoc_version ${arg})
+ endif ()
+ elseif (${_state} STREQUAL "overview")
+ if (${arg} STREQUAL "PACKAGES")
+ set(_state "packages")
+ elseif (${arg} STREQUAL "FILES")
+ set(_state "files")
+ elseif (${arg} STREQUAL "SOURCEPATH")
+ set(_state "sourcepath")
+ elseif (${arg} STREQUAL "CLASSPATH")
+ set(_state "classpath")
+ elseif (${arg} STREQUAL "INSTALLPATH")
+ set(_state "installpath")
+ elseif (${arg} STREQUAL "DOCTITLE")
+ set(_state "doctitle")
+ elseif (${arg} STREQUAL "WINDOWTITLE")
+ set(_state "windowtitle")
+ elseif (${arg} STREQUAL "AUTHOR")
+ set(_state "author")
+ elseif (${arg} STREQUAL "USE")
+ set(_state "use")
+ elseif (${arg} STREQUAL "VERSION")
+ set(_state "version")
+ else ()
+ list(APPEND _javadoc_overview ${arg})
+ endif ()
+ endif ()
+ endforeach ()
+
+ set(_javadoc_builddir ${CMAKE_CURRENT_BINARY_DIR}/javadoc/${_target})
+ set(_javadoc_options -d ${_javadoc_builddir})
+
+ if (_javadoc_sourcepath)
+ set(_start TRUE)
+ foreach(_path ${_javadoc_sourcepath})
+ if (_start)
+ set(_sourcepath ${_path})
+ set(_start FALSE)
+ else ()
+ set(_sourcepath ${_sourcepath}:${_path})
+ endif ()
+ endforeach()
+ set(_javadoc_options ${_javadoc_options} -sourcepath ${_sourcepath})
+ endif ()
+
+ if (_javadoc_overview)
+ set(_start TRUE)
+ foreach(_path ${_javadoc_overview})
+ if (_start)
+ set(_overview ${_path})
+ set(_start FALSE)
+ else ()
+ set(_overview ${_overview}:${_path})
+ endif ()
+ endforeach()
+ set(_javadoc_options ${_javadoc_options} -overview ${_overview})
+ endif ()
+
+ if (_javadoc_classpath)
+ set(_start TRUE)
+ foreach(_path ${_javadoc_classpath})
+ if (_start)
+ set(_classpath ${_path})
+ set(_start FALSE)
+ else ()
+ set(_classpath ${_classpath}:${_path})
+ endif ()
+ endforeach()
+ set(_javadoc_options ${_javadoc_options} -classpath "${_classpath}")
+ endif ()
+
+ if (_javadoc_doctitle)
+ set(_javadoc_options ${_javadoc_options} -doctitle '${_javadoc_doctitle}')
+ endif ()
+
+ if (_javadoc_windowtitle)
+ set(_javadoc_options ${_javadoc_options} -windowtitle '${_javadoc_windowtitle}')
+ endif ()
+
+ if (_javadoc_author)
+ set(_javadoc_options ${_javadoc_options} -author)
+ endif ()
+
+ if (_javadoc_use)
+ set(_javadoc_options ${_javadoc_options} -use)
+ endif ()
+
+ if (_javadoc_version)
+ set(_javadoc_options ${_javadoc_options} -version)
+ endif ()
+
+ add_custom_target(${_target}_javadoc ALL
+ COMMAND ${Java_JAVADOC_EXECUTABLE} ${_javadoc_options}
+ ${_javadoc_files}
+ ${_javadoc_packages}
+ WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
+ )
+
+ install(
+ DIRECTORY ${_javadoc_builddir}
+ DESTINATION ${_javadoc_installpath}
+ )
+endfunction()
+
+function (create_javah)
+ cmake_parse_arguments(_create_javah
+ ""
+ "TARGET;GENERATED_FILES;OUTPUT_NAME;OUTPUT_DIR"
+ "CLASSES;CLASSPATH;DEPENDS"
+ ${ARGN})
+
+ # check parameters
+ if (NOT _create_javah_TARGET AND NOT _create_javah_GENERATED_FILES)
+ message (FATAL_ERROR "create_javah: TARGET or GENERATED_FILES must be specified.")
+ endif()
+ if (_create_javah_OUTPUT_NAME AND _create_javah_OUTPUT_DIR)
+ message (FATAL_ERROR "create_javah: OUTPUT_NAME and OUTPUT_DIR are mutually exclusive.")
+ endif()
+
+ if (NOT _create_javah_CLASSES)
+ message (FATAL_ERROR "create_javah: CLASSES is a required parameter.")
+ endif()
+
+ set (_output_files)
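+ # On native Windows the separator is written as the $<SEMICOLON> genex so the literal ';' survives CMake list expansion.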
+ if (WIN32 AND NOT CYGWIN AND CMAKE_HOST_SYSTEM_NAME MATCHES "Windows")
+ set(_classpath_sep "$<SEMICOLON>")
+ else ()
+ set(_classpath_sep ":")
+ endif()
+
+ # handle javah options
+ set (_javah_options)
+
+ if (_create_javah_CLASSPATH)
+ # CLASSPATH can specify directories, jar files or targets created with add_jar command
+ set (_classpath)
+ foreach (_path IN LISTS _create_javah_CLASSPATH)
+ if (TARGET ${_path})
+ get_target_property (_jar_path ${_path} JAR_FILE)
+ if (_jar_path)
+ list (APPEND _classpath "${_jar_path}")
+ list (APPEND _create_javah_DEPENDS "${_path}")
+ else()
+ message(SEND_ERROR "create_javah: CLASSPATH target ${_path} is not a jar.")
+ endif()
+ elseif (EXISTS "${_path}")
+ list (APPEND _classpath "${_path}")
+ if (NOT IS_DIRECTORY "${_path}")
+ list (APPEND _create_javah_DEPENDS "${_path}")
+ endif()
+ else()
+ message(SEND_ERROR "create_javah: CLASSPATH entry ${_path} does not exist.")
+ endif()
+ endforeach()
+ string (REPLACE ";" "${_classpath_sep}" _classpath "${_classpath}")
+ list (APPEND _javah_options -classpath "${_classpath}")
+ endif()
+
+ if (_create_javah_OUTPUT_DIR)
+ list (APPEND _javah_options -d "${_create_javah_OUTPUT_DIR}")
+ endif()
+
+ if (_create_javah_OUTPUT_NAME)
+ list (APPEND _javah_options -o "${_create_javah_OUTPUT_NAME}")
+ set (_output_files "${_create_javah_OUTPUT_NAME}")
+
+ get_filename_component (_create_javah_OUTPUT_DIR "${_create_javah_OUTPUT_NAME}" DIRECTORY)
+ get_filename_component (_create_javah_OUTPUT_DIR "${_create_javah_OUTPUT_DIR}" ABSOLUTE)
+ endif()
+
+ if (NOT _create_javah_OUTPUT_DIR)
+ set (_create_javah_OUTPUT_DIR "${CMAKE_CURRENT_BINARY_DIR}")
+ endif()
+
+ if (NOT _create_javah_OUTPUT_NAME)
+ # compute output names
+ foreach (_class IN LISTS _create_javah_CLASSES)
+ string (REPLACE "." "_" _c_header "${_class}")
+ set (_c_header "${_create_javah_OUTPUT_DIR}/${_c_header}.h")
+ list (APPEND _output_files "${_c_header}")
+ endforeach()
+ endif()
+
+ # finalize custom command arguments
+ if (_create_javah_DEPENDS)
+ list (INSERT _create_javah_DEPENDS 0 DEPENDS)
+ endif()
+
+ add_custom_command (OUTPUT ${_output_files}
+ COMMAND "${Java_JAVAH_EXECUTABLE}" ${_javah_options} -jni ${_create_javah_CLASSES}
+ ${_create_javah_DEPENDS}
+ WORKING_DIRECTORY ${_create_javah_OUTPUT_DIR}
+ COMMENT "Building C header files from classes...")
+
+ if (_create_javah_TARGET)
+ add_custom_target (${_create_javah_TARGET} ALL DEPENDS ${_output_files})
+ endif()
+ if (_create_javah_GENERATED_FILES)
+ set (${_create_javah_GENERATED_FILES} ${_output_files} PARENT_SCOPE)
+ endif()
+endfunction()
diff --git a/config/cmake/UseJavaClassFilelist.cmake b/config/cmake/UseJavaClassFilelist.cmake
new file mode 100644
index 0000000..e8e6f01
--- /dev/null
+++ b/config/cmake/UseJavaClassFilelist.cmake
@@ -0,0 +1,58 @@
+#.rst:
+# UseJavaClassFilelist
+# --------------------
+#
+#
+#
+#
+#
+# This script creates a list of compiled Java class files to be added to
+# a jar file. This avoids including cmake files which get created in
+# the binary directory.
+
+#=============================================================================
+# Copyright 2010-2011 Andreas schneider <asn at redhat.com>
+#
+# Distributed under the OSI-approved BSD License (the "License");
+# see accompanying file Copyright.txt for details.
+#
+# This software is distributed WITHOUT ANY WARRANTY; without even the
+# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the License for more information.
+#=============================================================================
+# (To distribute this file outside of CMake, substitute the full
+# License text for the above reference.)
+
+if (CMAKE_JAVA_CLASS_OUTPUT_PATH)
+ if (EXISTS "${CMAKE_JAVA_CLASS_OUTPUT_PATH}")
+
+ set(_JAVA_GLOBBED_FILES)
+ if (CMAKE_JAR_CLASSES_PREFIX)
+ foreach(JAR_CLASS_PREFIX ${CMAKE_JAR_CLASSES_PREFIX})
+ message(STATUS "JAR_CLASS_PREFIX: ${JAR_CLASS_PREFIX}")
+
+ file(GLOB_RECURSE _JAVA_GLOBBED_TMP_FILES "${CMAKE_JAVA_CLASS_OUTPUT_PATH}/${JAR_CLASS_PREFIX}/*.class")
+ if (_JAVA_GLOBBED_TMP_FILES)
+ list(APPEND _JAVA_GLOBBED_FILES ${_JAVA_GLOBBED_TMP_FILES})
+ endif ()
+ endforeach()
+ else()
+ file(GLOB_RECURSE _JAVA_GLOBBED_FILES "${CMAKE_JAVA_CLASS_OUTPUT_PATH}/*.class")
+ endif ()
+
+ set(_JAVA_CLASS_FILES)
+ # file(GLOB_RECURSE foo RELATIVE) is broken so we need this.
+ foreach(_JAVA_GLOBBED_FILE ${_JAVA_GLOBBED_FILES})
+ file(RELATIVE_PATH _JAVA_CLASS_FILE ${CMAKE_JAVA_CLASS_OUTPUT_PATH} ${_JAVA_GLOBBED_FILE})
+ set(_JAVA_CLASS_FILES ${_JAVA_CLASS_FILES}${_JAVA_CLASS_FILE}\n)
+ endforeach()
+
+ # write to file
+ file(WRITE ${CMAKE_JAVA_CLASS_OUTPUT_PATH}/java_class_filelist ${_JAVA_CLASS_FILES})
+
+ else ()
+ message(SEND_ERROR "FATAL: Java class output path doesn't exist")
+ endif ()
+else ()
+ message(SEND_ERROR "FATAL: Can't find CMAKE_JAVA_CLASS_OUTPUT_PATH")
+endif ()
diff --git a/config/cmake/UseJavaSymlinks.cmake b/config/cmake/UseJavaSymlinks.cmake
new file mode 100644
index 0000000..90ffdd5
--- /dev/null
+++ b/config/cmake/UseJavaSymlinks.cmake
@@ -0,0 +1,38 @@
+#.rst:
+# UseJavaSymlinks
+# ---------------
+#
+#
+#
+#
+#
+# Helper script for UseJava.cmake
+
+#=============================================================================
+# Copyright 2010-2011 Andreas schneider <asn at redhat.com>
+#
+# Distributed under the OSI-approved BSD License (the "License");
+# see accompanying file Copyright.txt for details.
+#
+# This software is distributed WITHOUT ANY WARRANTY; without even the
+# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the License for more information.
+#=============================================================================
+# (To distribute this file outside of CMake, substitute the full
+# License text for the above reference.)
+
+if (UNIX AND _JAVA_TARGET_OUTPUT_LINK)
+ if (_JAVA_TARGET_OUTPUT_NAME)
+ find_program(LN_EXECUTABLE
+ NAMES
+ ln
+ )
+
+ execute_process(
+ COMMAND ${LN_EXECUTABLE} -sf "${_JAVA_TARGET_OUTPUT_NAME}" "${_JAVA_TARGET_OUTPUT_LINK}"
+ WORKING_DIRECTORY ${_JAVA_TARGET_DIR}
+ )
+ else ()
+ message(SEND_ERROR "FATAL: Can't find _JAVA_TARGET_OUTPUT_NAME")
+ endif ()
+endif ()
diff --git a/config/cmake/cacheinit.cmake b/config/cmake/cacheinit.cmake
index 7ca1bd4..0cb96a8 100644
--- a/config/cmake/cacheinit.cmake
+++ b/config/cmake/cacheinit.cmake
@@ -8,6 +8,8 @@ set (BUILD_SHARED_LIBS ON CACHE BOOL "Build Shared Libraries" FORCE)
set (BUILD_TESTING ON CACHE BOOL "Build HDF4 Unit Testing" FORCE)
+set (CMAKE_INSTALL_FRAMEWORK_PREFIX "Library/Frameworks" CACHE STRING "Frameworks installation directory" FORCE)
+
set (HDF_PACKAGE_EXT "" CACHE STRING "Name of HDF package extension" FORCE)
set (HDF4_BUILD_EXAMPLES ON CACHE BOOL "Build HDF4 Library Examples" FORCE)
@@ -49,12 +51,6 @@ set (HDF4_NO_PACKAGES OFF CACHE BOOL "CPACK - Disable packaging" FORCE)
set (HDF4_ALLOW_EXTERNAL_SUPPORT "NO" CACHE STRING "Allow External Library Building (NO SVN TGZ)" FORCE)
set_property (CACHE HDF4_ALLOW_EXTERNAL_SUPPORT PROPERTY STRINGS NO SVN TGZ)
-set (ZLIB_SVN_URL "http://svn.hdfgroup.uiuc.edu/zlib/trunk" CACHE STRING "Use ZLib from HDF repository" FORCE)
-
-set (SZIP_SVN_URL "http://svn.hdfgroup.uiuc.edu/szip/trunk" CACHE STRING "Use SZip from HDF repository" FORCE)
-
-set (JPEG_SVN_URL "http://svn.hdfgroup.uiuc.edu/jpeg/branches/jpeg8b" CACHE STRING "Use JPEG from HDF repository" FORCE)
-
set (ZLIB_TGZ_NAME "ZLib.tar.gz" CACHE STRING "Use ZLib from compressed file" FORCE)
set (SZIP_TGZ_NAME "SZip.tar.gz" CACHE STRING "Use SZip from compressed file" FORCE)
diff --git a/config/cmake/h4config.h.in b/config/cmake/h4config.h.in
index cb2caee..6c2fdbf 100644
--- a/config/cmake/h4config.h.in
+++ b/config/cmake/h4config.h.in
@@ -7,12 +7,6 @@
libraries. */
#cmakedefine H4_F77_DUMMY_MAIN @H4_F77_DUMMY_MAIN@
-/* Defined if HDF4 was built with CMake AND build as a shared library */
-#cmakedefine H4_BUILT_AS_DYNAMIC_LIB @H4_BUILT_AS_DYNAMIC_LIB@
-
-/* Defined if HDF4 was built with CMake AND build as a static library */
-#cmakedefine H4_BUILT_AS_STATIC_LIB @H4_BUILT_AS_STATIC_LIB@
-
/* Define to a macro mangling the given C identifier (in lower and upper
case), which must not contain underscores, for linking with Fortran. */
#define @H4_F77_FUNC@
diff --git a/config/cmake/hdf4-config-version.cmake.in b/config/cmake/hdf4-config-version.cmake.in
index 3dc2aa9..6aa0691 100644
--- a/config/cmake/hdf4-config-version.cmake.in
+++ b/config/cmake/hdf4-config-version.cmake.in
@@ -1,27 +1,47 @@
#-----------------------------------------------------------------------------
# HDF4 Version file for install directory
#-----------------------------------------------------------------------------
+#
+# The created file sets PACKAGE_VERSION_EXACT if the current version string and
+# the requested version string are exactly the same and it sets
+# PACKAGE_VERSION_COMPATIBLE if the current version is >= requested version,
+# but only if the requested major.minor version is the same as the current one.
+# The variable HDF4_VERSION_STRING must be set before calling configure_file().
-set (PACKAGE_VERSION @HDF4_VERSION_STRING@)
+set (PACKAGE_VERSION "@HDF4_VERSION_STRING@")
-if ("${PACKAGE_FIND_VERSION_MAJOR}" EQUAL @H4_VERS_MAJOR@)
+if("${PACKAGE_VERSION}" VERSION_LESS "${PACKAGE_FIND_VERSION}" )
+ set(PACKAGE_VERSION_COMPATIBLE FALSE)
+else()
+ if ("${PACKAGE_FIND_VERSION_MAJOR}" STREQUAL "@H4_VERS_MAJOR@")
- # exact match for version @H5_VERS_MAJOR at .@H4_VERS_MINOR@
- if ("${PACKAGE_FIND_VERSION_MINOR}" EQUAL @H4_VERS_MINOR@)
+ # exact match for version @H4_VERS_MAJOR@.@H4_VERS_MINOR@
+ if ("${PACKAGE_FIND_VERSION_MINOR}" STREQUAL "@H4_VERS_MINOR@")
- # compatible with any version @H4_VERS_MAJOR at .@H4_VERS_MINOR at .x
- set (PACKAGE_VERSION_COMPATIBLE 1)
+ # compatible with any version @H4_VERS_MAJOR@.@H4_VERS_MINOR@.x
+ set (PACKAGE_VERSION_COMPATIBLE TRUE)
- if ("${PACKAGE_FIND_VERSION_PATCH}" EQUAL @H4_VERS_RELEASE@)
- set (PACKAGE_VERSION_EXACT 1)
+ if ("${PACKAGE_FIND_VERSION_PATCH}" STREQUAL "@H4_VERS_RELEASE@")
+ set (PACKAGE_VERSION_EXACT TRUE)
- if ("${PACKAGE_FIND_VERSION_TWEAK}" EQUAL @H4_VERS_SUBRELEASE@)
- # not using this yet
- endif ("${PACKAGE_FIND_VERSION_TWEAK}" EQUAL @H4_VERS_SUBRELEASE@)
-
- endif ("${PACKAGE_FIND_VERSION_PATCH}" EQUAL @H4_VERS_RELEASE@)
-
- endif ("${PACKAGE_FIND_VERSION_MINOR}" EQUAL @H4_VERS_MINOR@)
-endif ("${PACKAGE_FIND_VERSION_MAJOR}" EQUAL @H4_VERS_MAJOR@)
+ if ("${PACKAGE_FIND_VERSION_TWEAK}" STREQUAL "@H4_VERS_SUBRELEASE@")
+ # not using this yet
+ endif ("${PACKAGE_FIND_VERSION_TWEAK}" STREQUAL "@H4_VERS_SUBRELEASE@")
+ endif ("${PACKAGE_FIND_VERSION_PATCH}" STREQUAL "@H4_VERS_RELEASE@")
+ else ("${PACKAGE_FIND_VERSION_MINOR}" STREQUAL "@H4_VERS_MINOR@")
+ set (PACKAGE_VERSION_COMPATIBLE FALSE)
+ endif ("${PACKAGE_FIND_VERSION_MINOR}" STREQUAL "@H4_VERS_MINOR@")
+ endif ("${PACKAGE_FIND_VERSION_MAJOR}" STREQUAL "@H4_VERS_MAJOR@")
+endif()
+# if the installed or the using project don't have CMAKE_SIZEOF_VOID_P set, ignore it:
+if("${CMAKE_SIZEOF_VOID_P}" STREQUAL "" OR "@CMAKE_SIZEOF_VOID_P@" STREQUAL "")
+ return()
+endif()
+# check that the installed version has the same 32/64bit-ness as the one which is currently searching:
+if(NOT "${CMAKE_SIZEOF_VOID_P}" STREQUAL "@CMAKE_SIZEOF_VOID_P@")
+ math(EXPR installedBits "@CMAKE_SIZEOF_VOID_P@ * 8")
+ set(PACKAGE_VERSION "${PACKAGE_VERSION} (${installedBits}bit)")
+ set(PACKAGE_VERSION_UNSUITABLE TRUE)
+endif()
diff --git a/config/cmake/hdf4-config.cmake.build.in b/config/cmake/hdf4-config.cmake.build.in
deleted file mode 100644
index a039784..0000000
--- a/config/cmake/hdf4-config.cmake.build.in
+++ /dev/null
@@ -1,61 +0,0 @@
-#-----------------------------------------------------------------------------
-# HDF4 Config file for compiling against hdf4 build directory
-#-----------------------------------------------------------------------------
-GET_FILENAME_COMPONENT (SELF_DIR "${CMAKE_CURRENT_LIST_FILE}" PATH)
-
-#-----------------------------------------------------------------------------
-# User Options
-#-----------------------------------------------------------------------------
-set (HDF4_ENABLE_PARALLEL @HDF4_ENABLE_PARALLEL@)
-set (HDF4_BUILD_FORTRAN @HDF4_BUILD_FORTRAN@)
-set (HDF4_BUILD_XDR_LIB @HDF4_BUILD_XDR_LIB@)
-set (HDF4_BUILD_TOOLS @HDF4_BUILD_TOOLS@)
-set (HDF4_BUILD_UTILS @HDF4_BUILD_UTILS@)
-set (HDF4_ENABLE_JPEG_LIB_SUPPORT @HDF4_ENABLE_JPEG_LIB_SUPPORT@)
-set (HDF4_ENABLE_Z_LIB_SUPPORT @HDF4_ENABLE_Z_LIB_SUPPORT@)
-set (HDF4_ENABLE_SZIP_SUPPORT @HDF4_ENABLE_SZIP_SUPPORT@)
-set (HDF4_ENABLE_SZIP_ENCODING @HDF4_ENABLE_SZIP_ENCODING@)
-set (HDF4_BUILD_SHARED_LIBS @BUILD_SHARED_LIBS@)
-
-#-----------------------------------------------------------------------------
-# Directories
-#-----------------------------------------------------------------------------
-set (HDF4_INCLUDE_DIR "@HDF4_INCLUDES_BUILD_TIME@")
-
-if (HDF4_BUILD_FORTRAN)
- set (HDF4_INCLUDE_DIR_FORTRAN "@CMAKE_Fortran_MODULE_DIRECTORY@" )
-endif (HDF4_BUILD_FORTRAN)
-
-if (HDF4_BUILD_XDR_LIB)
- set (HDF4_INCLUDE_DIR_XDR ${HDF4_INCLUDE_DIR} )
-endif (HDF4_BUILD_XDR_LIB)
-
-if (HDF4_BUILD_TOOLS)
- set (HDF4_INCLUDE_DIR_TOOLS ${HDF4_INCLUDE_DIR} )
-endif (HDF4_BUILD_TOOLS)
-
-if (HDF4_BUILD_UTILS)
- set (HDF4_INCLUDE_DIR_UTILS ${HDF4_INCLUDE_DIR} )
-endif (HDF4_BUILD_UTILS)
-
-if (HDF4_BUILD_SHARED_LIBS)
- set (H4_BUILT_AS_DYNAMIC_LIB 1 )
-else (HDF4_BUILD_SHARED_LIBS)
- set (H4_BUILT_AS_STATIC_LIB 1 )
-endif (HDF4_BUILD_SHARED_LIBS)
-
-#-----------------------------------------------------------------------------
-# Version Strings
-#-----------------------------------------------------------------------------
-set (HDF4_VERSION_STRING @HDF4_VERSION_STRING@)
-set (HDF4_VERSION_MAJOR @HDF4_VERSION_MAJOR@)
-set (HDF4_VERSION_MINOR @HDF4_VERSION_MINOR@)
-
-#-----------------------------------------------------------------------------
-# Don't include targets if this file is being picked up by another
-# project which has already build hdf4 as a subproject
-#-----------------------------------------------------------------------------
-if (NOT TARGET "@HDF4_PACKAGE@")
- include (${SELF_DIR}/@HDF4_PACKAGE@@HDF_PACKAGE_EXT at -targets.cmake)
- set (HDF4_LIBRARIES "@HDF4_LIBRARIES_TO_EXPORT@")
-endif (NOT TARGET "@HDF4_PACKAGE@")
diff --git a/config/cmake/hdf4-config.cmake.in b/config/cmake/hdf4-config.cmake.in
new file mode 100644
index 0000000..df3ad23
--- /dev/null
+++ b/config/cmake/hdf4-config.cmake.in
@@ -0,0 +1,148 @@
+#-----------------------------------------------------------------------------
+# HDF4 Config file for compiling against hdf4 build/install directory
+#-----------------------------------------------------------------------------
+ at PACKAGE_INIT@
+
+string(TOUPPER @HDF4_PACKAGE@ HDF4_PACKAGE_NAME)
+
+set (${HDF4_PACKAGE_NAME}_VALID_COMPONENTS
+ static
+ shared
+ C
+ Fortran
+ Java
+)
+
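+# A consuming project would typically load this file through find_package,
+# for example (illustrative call; component names come from the list above):
+#
+# find_package (HDF4 NAMES hdf4 COMPONENTS C shared)
+#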
+#-----------------------------------------------------------------------------
+# User Options
+#-----------------------------------------------------------------------------
+set (${HDF4_PACKAGE_NAME}_ENABLE_PARALLEL @HDF4_ENABLE_PARALLEL@)
+set (${HDF4_PACKAGE_NAME}_BUILD_FORTRAN @HDF4_BUILD_FORTRAN@)
+set (${HDF4_PACKAGE_NAME}_BUILD_JAVA @HDF4_BUILD_JAVA@)
+set (${HDF4_PACKAGE_NAME}_BUILD_XDR_LIB @HDF4_BUILD_XDR_LIB@)
+set (${HDF4_PACKAGE_NAME}_BUILD_TOOLS @HDF4_BUILD_TOOLS@)
+set (${HDF4_PACKAGE_NAME}_BUILD_UTILS @HDF4_BUILD_UTILS@)
+set (${HDF4_PACKAGE_NAME}_ENABLE_JPEG_LIB_SUPPORT @HDF4_ENABLE_JPEG_LIB_SUPPORT@)
+set (${HDF4_PACKAGE_NAME}_ENABLE_Z_LIB_SUPPORT @HDF4_ENABLE_Z_LIB_SUPPORT@)
+set (${HDF4_PACKAGE_NAME}_ENABLE_SZIP_SUPPORT @HDF4_ENABLE_SZIP_SUPPORT@)
+set (${HDF4_PACKAGE_NAME}_ENABLE_SZIP_ENCODING @HDF4_ENABLE_SZIP_ENCODING@)
+set (${HDF4_PACKAGE_NAME}_BUILD_SHARED_LIBS @BUILD_SHARED_LIBS@)
+set (${HDF4_PACKAGE_NAME}_PACKAGE_EXTLIBS @HDF4_PACKAGE_EXTLIBS@)
+set (${HDF4_PACKAGE_NAME}_EXPORT_LIBRARIES @HDF4_LIBRARIES_TO_EXPORT@)
+
+#-----------------------------------------------------------------------------
+# Dependencies
+#-----------------------------------------------------------------------------
+if (${HDF4_PACKAGE_NAME}_BUILD_JAVA)
+ set (${HDF4_PACKAGE_NAME}_JAVA_INCLUDE_DIRS
+ @PACKAGE_CURRENT_BUILD_DIR@/lib/jarhdf-@HDF4_VERSION_STRING@.jar
+ @PACKAGE_CURRENT_BUILD_DIR@/lib/slf4j-api-1.7.5.jar
+ @PACKAGE_CURRENT_BUILD_DIR@/lib/slf4j-nop-1.7.5.jar
+ )
+ set (${HDF4_PACKAGE_NAME}_JAVA_LIBRARY "@PACKAGE_CURRENT_BUILD_DIR@/lib")
+ set (${HDF4_PACKAGE_NAME}_JAVA_LIBRARIES "${${HDF4_PACKAGE_NAME}_JAVA_LIBRARY}")
+endif()
+
+#-----------------------------------------------------------------------------
+# Directories
+#-----------------------------------------------------------------------------
+set (${HDF4_PACKAGE_NAME}_INCLUDE_DIR "@PACKAGE_INCLUDE_INSTALL_DIR@" "${${HDF4_PACKAGE_NAME}_MPI_C_INCLUDE_PATH}" )
+
+set (${HDF4_PACKAGE_NAME}_SHARE_DIR "@PACKAGE_SHARE_INSTALL_DIR@")
+set_and_check (${HDF4_PACKAGE_NAME}_BUILD_DIR "@PACKAGE_CURRENT_BUILD_DIR@")
+
+if (${HDF4_PACKAGE_NAME}_BUILD_FORTRAN)
+ set (${HDF4_PACKAGE_NAME}_INCLUDE_DIR_FORTRAN "@PACKAGE_INCLUDE_INSTALL_DIR@" )
+endif ()
+
+if (${HDF4_PACKAGE_NAME}_BUILD_TOOLS)
+ set (${HDF4_PACKAGE_NAME}_INCLUDE_DIR_TOOLS "@PACKAGE_INCLUDE_INSTALL_DIR@" )
+ set_and_check (${HDF4_PACKAGE_NAME}_TOOLS_DIR "@PACKAGE_CURRENT_BUILD_DIR@/bin" )
+endif ()
+
+
+if (${HDF4_PACKAGE_NAME}_BUILD_UTILS)
+ set (${HDF4_PACKAGE_NAME}_INCLUDE_DIR_UTILS "@PACKAGE_INCLUDE_INSTALL_DIR@" )
+ set_and_check (${HDF4_PACKAGE_NAME}_UTILS_DIR "@PACKAGE_CURRENT_BUILD_DIR@/bin" )
+endif ()
+
+#-----------------------------------------------------------------------------
+# Version Strings
+#-----------------------------------------------------------------------------
+set (HDF4_VERSION_STRING @HDF4_VERSION_STRING@)
+set (HDF4_VERSION_MAJOR @HDF4_VERSION_MAJOR@)
+set (HDF4_VERSION_MINOR @HDF4_VERSION_MINOR@)
+
+#-----------------------------------------------------------------------------
+# Don't include targets if this file is being picked up by another
+# project which has already built hdf4 as a subproject
+#-----------------------------------------------------------------------------
+if (NOT TARGET "@HDF4_PACKAGE@")
+ if (${HDF4_PACKAGE_NAME}_ENABLE_JPEG_LIB_SUPPORT AND ${HDF4_PACKAGE_NAME}_PACKAGE_EXTLIBS AND NOT TARGET "jpeg")
+ include (@PACKAGE_SHARE_INSTALL_DIR@/@JPEG_PACKAGE_NAME@@HDF_PACKAGE_EXT@-targets.cmake)
+ endif ()
+ if (${HDF4_PACKAGE_NAME}_ENABLE_Z_LIB_SUPPORT AND ${HDF4_PACKAGE_NAME}_PACKAGE_EXTLIBS AND NOT TARGET "zlib")
+ include (@PACKAGE_SHARE_INSTALL_DIR@/@ZLIB_PACKAGE_NAME@@HDF_PACKAGE_EXT@-targets.cmake)
+ endif ()
+ if (${HDF4_PACKAGE_NAME}_ENABLE_SZIP_SUPPORT AND ${HDF4_PACKAGE_NAME}_PACKAGE_EXTLIBS AND NOT TARGET "szip")
+ include (@PACKAGE_SHARE_INSTALL_DIR@/@SZIP_PACKAGE_NAME@@HDF_PACKAGE_EXT@-targets.cmake)
+ endif ()
+ include (@PACKAGE_SHARE_INSTALL_DIR@/@HDF4_PACKAGE@@HDF_PACKAGE_EXT@-targets.cmake)
+endif ()
+
+# Handle default component(static) :
+if (NOT ${HDF4_PACKAGE_NAME}_FIND_COMPONENTS)
+ set (${HDF4_PACKAGE_NAME}_LIB_TYPE)
+ set (${HDF4_PACKAGE_NAME}_FIND_COMPONENTS C static)
+ set (${HDF4_PACKAGE_NAME}_FIND_REQUIRED_static_C true)
+endif ()
+
+# Handle requested components:
+list (REMOVE_DUPLICATES ${HDF4_PACKAGE_NAME}_FIND_COMPONENTS)
+foreach (comp IN LISTS ${HDF4_PACKAGE_NAME}_FIND_COMPONENTS)
+ if (${comp} STREQUAL "shared")
+ list (REMOVE_ITEM ${HDF4_PACKAGE_NAME}_FIND_COMPONENTS ${comp})
+ set (${HDF4_PACKAGE_NAME}_LIB_TYPE ${${HDF4_PACKAGE_NAME}_LIB_TYPE} ${comp})
+ elseif (${comp} STREQUAL "static")
+ list (REMOVE_ITEM ${HDF4_PACKAGE_NAME}_FIND_COMPONENTS ${comp})
+ set (${HDF4_PACKAGE_NAME}_LIB_TYPE ${${HDF4_PACKAGE_NAME}_LIB_TYPE} ${comp})
+ endif ()
+endforeach ()
+foreach (libtype IN LISTS ${HDF4_PACKAGE_NAME}_LIB_TYPE)
+ foreach (comp IN LISTS ${HDF4_PACKAGE_NAME}_FIND_COMPONENTS)
+ set (hdf4_comp2)
+ if (${comp} STREQUAL "C")
+ set (hdf4_comp "hdf")
+ elseif (${comp} STREQUAL "Java")
+ set (hdf4_comp "hdf4_java")
+ elseif (${comp} STREQUAL "Fortran")
+ set (hdf4_comp2 "hdf_fcstub")
+ set (hdf4_comp "hdf_fortran")
+ endif ()
+ list (FIND ${HDF4_PACKAGE_NAME}_EXPORT_LIBRARIES "${hdf4_comp}-${libtype}" HAVE_COMP)
+ list (FIND ${HDF4_PACKAGE_NAME}_EXPORT_LIBRARIES "mf${hdf4_comp}-${libtype}" HAVE_MFCOMP)
+ if (${HAVE_COMP} LESS 0 OR ${HAVE_MFCOMP} LESS 0)
+ set (${HDF4_PACKAGE_NAME}_${libtype}_${comp}_FOUND 0)
+ else ()
+ if (hdf4_comp2)
+ list (FIND ${HDF4_PACKAGE_NAME}_EXPORT_LIBRARIES "${hdf4_comp2}-${libtype}" HAVE_COMP2)
+ list (FIND ${HDF4_PACKAGE_NAME}_EXPORT_LIBRARIES "mf${hdf4_comp2}-${libtype}" HAVE_MFCOMP2)
+ if (${HAVE_COMP2} LESS 0 OR ${HAVE_MFCOMP2} LESS 0)
+ set (${HDF4_PACKAGE_NAME}_${libtype}_${comp}_FOUND 0)
+ else ()
+ set (${HDF4_PACKAGE_NAME}_${libtype}_${comp}_FOUND 1)
+ string(TOUPPER ${HDF4_PACKAGE_NAME}_${comp}_${libtype}_LIBRARY COMP_LIBRARY)
+ set (${COMP_LIBRARY} ${${COMP_LIBRARY}} ${hdf4_comp2}-${libtype} ${hdf4_comp}-${libtype} mf${hdf4_comp2}-${libtype} mf${hdf4_comp}-${libtype})
+ endif ()
+ else ()
+ set (${HDF4_PACKAGE_NAME}_${libtype}_${comp}_FOUND 1)
+ string(TOUPPER ${HDF4_PACKAGE_NAME}_${comp}_${libtype}_LIBRARY COMP_LIBRARY)
+ set (${COMP_LIBRARY} ${${COMP_LIBRARY}} ${hdf4_comp}-${libtype} mf${hdf4_comp}-${libtype})
+ endif ()
+ endif ()
+ endforeach ()
+endforeach ()
+
+foreach (libtype IN LISTS ${HDF4_PACKAGE_NAME}_LIB_TYPE)
+ check_required_components(${HDF4_PACKAGE_NAME}_${libtype})
+endforeach ()
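
For context, the new unified config file above is what downstream projects load through find_package() in config mode. The sketch below is illustrative only: it assumes @HDF4_PACKAGE@ expands to "hdf4" (so the exported variables come out as HDF4_*), and the project and source names are placeholders.

  cmake_minimum_required (VERSION 3.1)
  project (use_hdf4 C)
  # Locate the installed hdf4-config.cmake, e.g. via -DCMAKE_PREFIX_PATH=<install prefix>.
  find_package (HDF4 NAMES hdf4 COMPONENTS C shared REQUIRED)
  add_executable (example example.c)
  # The component loop above fills <PKG>_<comp>_<libtype>_LIBRARY (upper-cased).
  target_include_directories (example PRIVATE ${HDF4_INCLUDE_DIR})
  target_link_libraries (example ${HDF4_C_SHARED_LIBRARY})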
diff --git a/config/cmake/hdf4-config.cmake.install.in b/config/cmake/hdf4-config.cmake.install.in
deleted file mode 100644
index 15ce3ec..0000000
--- a/config/cmake/hdf4-config.cmake.install.in
+++ /dev/null
@@ -1,71 +0,0 @@
-#-----------------------------------------------------------------------------
-# HDF4 Config file for compiling against hdf4 install directory
-#-----------------------------------------------------------------------------
-GET_FILENAME_COMPONENT (SELF_DIR "${CMAKE_CURRENT_LIST_FILE}" PATH)
-GET_FILENAME_COMPONENT(_IMPORT_PREFIX "${SELF_DIR}" PATH)
-GET_FILENAME_COMPONENT(_IMPORT_PREFIX "${_IMPORT_PREFIX}" PATH)
-if (NOT WIN32)
- GET_FILENAME_COMPONENT(_IMPORT_PREFIX "${_IMPORT_PREFIX}" PATH)
-endif (NOT WIN32)
-
-#-----------------------------------------------------------------------------
-# User Options
-#-----------------------------------------------------------------------------
-set (HDF4_ENABLE_PARALLEL @HDF4_ENABLE_PARALLEL@)
-set (HDF4_BUILD_FORTRAN @HDF4_BUILD_FORTRAN@)
-set (HDF4_BUILD_XDR_LIB @HDF4_BUILD_XDR_LIB@)
-set (HDF4_BUILD_TOOLS @HDF4_BUILD_TOOLS@)
-set (HDF4_BUILD_UTILS @HDF4_BUILD_UTILS@)
-set (HDF4_ENABLE_JPEG_LIB_SUPPORT @HDF4_ENABLE_JPEG_LIB_SUPPORT@)
-set (HDF4_ENABLE_Z_LIB_SUPPORT @HDF4_ENABLE_Z_LIB_SUPPORT@)
-set (HDF4_ENABLE_SZIP_SUPPORT @HDF4_ENABLE_SZIP_SUPPORT@)
-set (HDF4_ENABLE_SZIP_ENCODING @HDF4_ENABLE_SZIP_ENCODING@)
-set (HDF4_BUILD_SHARED_LIBS @BUILD_SHARED_LIBS@)
-set (HDF4_PACKAGE_EXTLIBS @HDF4_PACKAGE_EXTLIBS@)
-
-#-----------------------------------------------------------------------------
-# Directories
-#-----------------------------------------------------------------------------
-set (HDF4_INCLUDE_DIR "${_IMPORT_PREFIX}/include")
-
-if (HDF4_BUILD_FORTRAN)
- set (HDF4_INCLUDE_DIR_FORTRAN "${_IMPORT_PREFIX}/include")
-endif (HDF4_BUILD_FORTRAN)
-
-if (HDF4_BUILD_XDR_LIB)
- set (HDF4_INCLUDE_DIR_XDR "${_IMPORT_PREFIX}/include")
-endif (HDF4_BUILD_XDR_LIB)
-
-if (HDF4_BUILD_TOOLS)
- set (HDF4_INCLUDE_DIR_TOOLS "${_IMPORT_PREFIX}/include")
-endif (HDF4_BUILD_TOOLS)
-
-if (HDF4_BUILD_UTILS)
- set (HDF4_INCLUDE_DIR_UTILS "${_IMPORT_PREFIX}/include")
- set (HDF4_TOOLS_DIR "${_IMPORT_PREFIX}/bin" )
-endif (HDF4_BUILD_UTILS)
-
-#-----------------------------------------------------------------------------
-# Version Strings
-#-----------------------------------------------------------------------------
-set (HDF4_VERSION_STRING @HDF4_VERSION_STRING@)
-set (HDF4_VERSION_MAJOR @HDF4_VERSION_MAJOR@)
-set (HDF4_VERSION_MINOR @HDF4_VERSION_MINOR@)
-
-#-----------------------------------------------------------------------------
-# Don't include targets if this file is being picked up by another
-# project which has already built hdf4 as a subproject
-#-----------------------------------------------------------------------------
-if (NOT TARGET "@HDF4_PACKAGE@")
- if (HDF4_ENABLE_JPEG_LIB_SUPPORT AND HDF4_PACKAGE_EXTLIBS AND NOT TARGET "jpeg")
- include (${SELF_DIR}/../JPEG/@JPEG_PACKAGE_NAME@@HDF_PACKAGE_EXT@-targets.cmake)
- endif (HDF4_ENABLE_JPEG_LIB_SUPPORT AND HDF4_PACKAGE_EXTLIBS AND NOT TARGET "jpeg")
- if (HDF4_ENABLE_Z_LIB_SUPPORT AND HDF4_PACKAGE_EXTLIBS AND NOT TARGET "zlib")
- include (${SELF_DIR}/../ZLIB/@ZLIB_PACKAGE_NAME@@HDF_PACKAGE_EXT@-targets.cmake)
- endif (HDF4_ENABLE_Z_LIB_SUPPORT AND HDF4_PACKAGE_EXTLIBS AND NOT TARGET "zlib")
- if (HDF4_ENABLE_SZIP_SUPPORT AND HDF4_PACKAGE_EXTLIBS AND NOT TARGET "szip")
- include (${SELF_DIR}/../SZIP/@SZIP_PACKAGE_NAME@@HDF_PACKAGE_EXT@-targets.cmake)
- endif (HDF4_ENABLE_SZIP_SUPPORT AND HDF4_PACKAGE_EXTLIBS AND NOT TARGET "szip")
- include (${SELF_DIR}/@HDF4_PACKAGE@@HDF_PACKAGE_EXT@-targets.cmake)
- set (HDF4_LIBRARIES "@HDF4_LIBRARIES_TO_EXPORT@")
-endif (NOT TARGET "@HDF4_PACKAGE@")
diff --git a/config/cmake_ext_mod/runTest.cmake b/config/cmake/jrunTest.cmake
similarity index 50%
copy from config/cmake_ext_mod/runTest.cmake
copy to config/cmake/jrunTest.cmake
index bfaae2b..7e7d1f1 100644
--- a/config/cmake_ext_mod/runTest.cmake
+++ b/config/cmake/jrunTest.cmake
@@ -3,106 +3,95 @@
cmake_policy(SET CMP0007 NEW)
# arguments checking
+if (NOT TEST_TESTER)
+ message (FATAL_ERROR "Require TEST_TESTER to be defined")
+endif (NOT TEST_TESTER)
if (NOT TEST_PROGRAM)
message (FATAL_ERROR "Require TEST_PROGRAM to be defined")
endif (NOT TEST_PROGRAM)
-#if (NOT TEST_ARGS)
-# message (STATUS "Require TEST_ARGS to be defined")
-#endif (NOT TEST_ARGS)
+if (NOT TEST_LIBRARY_DIRECTORY)
+ message (STATUS "Require TEST_LIBRARY_DIRECTORY to be defined")
+endif (NOT TEST_LIBRARY_DIRECTORY)
if (NOT TEST_FOLDER)
message ( FATAL_ERROR "Require TEST_FOLDER to be defined")
endif (NOT TEST_FOLDER)
if (NOT TEST_OUTPUT)
message (FATAL_ERROR "Require TEST_OUTPUT to be defined")
endif (NOT TEST_OUTPUT)
-if (NOT TEST_EXPECT)
- message (STATUS "Require TEST_EXPECT to be defined")
-endif (NOT TEST_EXPECT)
-#if (NOT TEST_FILTER)
-# message (STATUS "Require TEST_FILTER to be defined")
-#endif (NOT TEST_FILTER)
-if (NOT TEST_SKIP_COMPARE AND NOT TEST_REFERENCE)
- message (FATAL_ERROR "Require TEST_REFERENCE to be defined")
-endif (NOT TEST_SKIP_COMPARE AND NOT TEST_REFERENCE)
+if (NOT TEST_CLASSPATH)
+ message (STATUS "Require TEST_CLASSPATH to be defined")
+endif (NOT TEST_CLASSPATH)
+if (NOT TEST_REFERENCE)
+ message (STATUS "Require TEST_REFERENCE to be defined")
+endif (NOT TEST_REFERENCE)
if (NOT TEST_ERRREF)
- set (ERROR_APPEND 1)
+ if (NOT SKIP_APPEND)
+ # append error file since skip was not defined
+ set (ERROR_APPEND 1)
+ endif(NOT SKIP_APPEND)
endif (NOT TEST_ERRREF)
-message (STATUS "COMMAND: ${TEST_PROGRAM} ${TEST_ARGS}")
-
-if (TEST_ENV_VAR)
- set (ENV{${TEST_ENV_VAR}} "${TEST_ENV_VALUE}")
-endif (TEST_ENV_VAR)
-
-if (NOT TEST_INPUT)
- # run the test program, capture the stdout/stderr and the result var
- EXECUTE_PROCESS (
- COMMAND ${TEST_PROGRAM} ${TEST_ARGS}
- WORKING_DIRECTORY ${TEST_FOLDER}
- RESULT_VARIABLE TEST_RESULT
- OUTPUT_FILE ${TEST_OUTPUT}
- ERROR_FILE ${TEST_OUTPUT}.err
- OUTPUT_VARIABLE TEST_ERROR
- ERROR_VARIABLE TEST_ERROR
- )
-else (NOT TEST_INPUT)
- # run the test program with stdin, capture the stdout/stderr and the result var
- EXECUTE_PROCESS (
- COMMAND ${TEST_PROGRAM} ${TEST_ARGS}
- WORKING_DIRECTORY ${TEST_FOLDER}
- RESULT_VARIABLE TEST_RESULT
- INPUT_FILE ${TEST_INPUT}
- OUTPUT_FILE ${TEST_OUTPUT}
- ERROR_FILE ${TEST_OUTPUT}.err
- OUTPUT_VARIABLE TEST_ERROR
- ERROR_VARIABLE TEST_ERROR
- )
-endif (NOT TEST_INPUT)
+if (NOT TEST_LOG_LEVEL)
+ set (LOG_LEVEL "info")
+else (NOT TEST_LOG_LEVEL)
+ set (LOG_LEVEL "${TEST_LOG_LEVEL}")
+endif (NOT TEST_LOG_LEVEL)
+
+message (STATUS "COMMAND: ${TEST_TESTER} -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLogLevel=${LOG_LEVEL} -Djava.library.path=\"${TEST_LIBRARY_DIRECTORY}\" -cp \"${TEST_CLASSPATH}\" ${TEST_ARGS} ${TEST_PROGRAM} ${ARGN}")
+
+if (WIN32 AND NOT MINGW)
+ set (ENV{PATH} "$ENV{PATH}\\;${TEST_LIBRARY_DIRECTORY}")
+endif (WIN32 AND NOT MINGW)
+
+# run the test program, capture the stdout/stderr and the result var
+execute_process (
+ COMMAND ${TEST_TESTER} -Xmx1024M
+ -Dorg.slf4j.simpleLogger.defaultLogLevel=${LOG_LEVEL}
+ -Djava.library.path=${TEST_LIBRARY_DIRECTORY}
+ -cp "${TEST_CLASSPATH}" ${TEST_ARGS} ${TEST_PROGRAM}
+ ${ARGN}
+ WORKING_DIRECTORY ${TEST_FOLDER}
+ RESULT_VARIABLE TEST_RESULT
+ OUTPUT_FILE ${TEST_OUTPUT}
+ ERROR_FILE ${TEST_OUTPUT}.err
+ ERROR_VARIABLE TEST_ERROR
+)
message (STATUS "COMMAND Result: ${TEST_RESULT}")
-if (ERROR_APPEND)
+if (EXISTS ${TEST_FOLDER}/${TEST_OUTPUT}.err)
file (READ ${TEST_FOLDER}/${TEST_OUTPUT}.err TEST_STREAM)
- file (APPEND ${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_STREAM}")
-endif (ERROR_APPEND)
-
-if (TEST_APPEND)
- file (APPEND ${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_APPEND} ${TEST_RESULT}\n")
-endif (TEST_APPEND)
-
-# if the return value is !=${TEST_EXPECT} bail out
-if (NOT ${TEST_RESULT} STREQUAL ${TEST_EXPECT})
- message ( FATAL_ERROR "Failed: Test program ${TEST_PROGRAM} exited != ${TEST_EXPECT}.\n${TEST_ERROR}")
-endif (NOT ${TEST_RESULT} STREQUAL ${TEST_EXPECT})
-
-message (STATUS "COMMAND Error: ${TEST_ERROR}")
-
-if (TEST_MASK)
- file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM)
- STRING(REGEX REPLACE "Storage:[^\n]+\n" "Storage: <details removed for portability>\n" TEST_STREAM "${TEST_STREAM}")
- file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_STREAM}")
-endif (TEST_MASK)
+ if (TEST_MASK_FILE)
+ STRING(REGEX REPLACE "CurrentDir is [^\n]+\n" "CurrentDir is (dir name)\n" TEST_STREAM "${TEST_STREAM}")
+ endif (TEST_MASK_FILE)
-if (TEST_MASK_MOD)
- file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM)
- STRING(REGEX REPLACE "Modified:[^\n]+\n" "Modified: XXXX-XX-XX XX:XX:XX XXX\n" TEST_STREAM "${TEST_STREAM}")
- file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_STREAM}")
-endif (TEST_MASK_MOD)
+ if (NOT ERROR_APPEND)
+ # append error output to the stdout output file
+ file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT}.err "${TEST_STREAM}")
+ else (NOT ERROR_APPEND)
+ # write back to original .err file
+ file (APPEND ${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_STREAM}")
+ endif (NOT ERROR_APPEND)
+endif (EXISTS ${TEST_FOLDER}/${TEST_OUTPUT}.err)
if (TEST_MASK_ERROR)
if (NOT TEST_ERRREF)
+ # the error stack has been appended to the output file
file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM)
else (NOT TEST_ERRREF)
+ # the error stack remains in the .err file
file (READ ${TEST_FOLDER}/${TEST_OUTPUT}.err TEST_STREAM)
endif (NOT TEST_ERRREF)
- STRING(REGEX REPLACE "thread [0-9]*:" "thread (IDs):" TEST_STREAM "${TEST_STREAM}")
- STRING(REGEX REPLACE ": ([^\n]*)[.]c " ": (file name) " TEST_STREAM "${TEST_STREAM}")
- STRING(REGEX REPLACE " line [0-9]*" " line (number)" TEST_STREAM "${TEST_STREAM}")
- STRING(REGEX REPLACE "v[1-9]*[.][0-9]*[.]" "version (number)." TEST_STREAM "${TEST_STREAM}")
- STRING(REGEX REPLACE "[1-9]*[.][0-9]*[.][0-9]*[^)]*" "version (number)" TEST_STREAM "${TEST_STREAM}")
- STRING(REGEX REPLACE "H5Eget_auto[1-2]*" "H5Eget_auto(1 or 2)" TEST_STREAM "${TEST_STREAM}")
- STRING(REGEX REPLACE "H5Eset_auto[1-2]*" "H5Eset_auto(1 or 2)" TEST_STREAM "${TEST_STREAM}")
+ string (REGEX REPLACE "Time:[^\n]+\n" "Time: XXXX\n" TEST_STREAM "${TEST_STREAM}")
+ string (REGEX REPLACE "thread [0-9]*:" "thread (IDs):" TEST_STREAM "${TEST_STREAM}")
+ string (REGEX REPLACE ": ([^\n]*)[.]c " ": (file name) " TEST_STREAM "${TEST_STREAM}")
+ string (REGEX REPLACE " line [0-9]*" " line (number)" TEST_STREAM "${TEST_STREAM}")
+ #string (REGEX REPLACE "v[1-9]*[.][0-9]*[.]" "version (number)." TEST_STREAM "${TEST_STREAM}")
+ string (REGEX REPLACE "HDF5 .[1-9]*[.][0-9]*[.][0-9]*[^)]*" "HDF5 (version (number)" TEST_STREAM "${TEST_STREAM}")
+ string (REGEX REPLACE "H5Eget_auto[1-2]*" "H5Eget_auto(1 or 2)" TEST_STREAM "${TEST_STREAM}")
+ string (REGEX REPLACE "H5Eset_auto[1-2]*" "H5Eset_auto(1 or 2)" TEST_STREAM "${TEST_STREAM}")
+ # write back the changes to the original files
if (NOT TEST_ERRREF)
file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_STREAM}")
else (NOT TEST_ERRREF)
@@ -110,12 +99,15 @@ if (TEST_MASK_ERROR)
endif (NOT TEST_ERRREF)
endif (TEST_MASK_ERROR)
-if (TEST_FILTER)
- file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM)
- STRING(REGEX REPLACE "${TEST_FILTER}" "" TEST_STREAM "${TEST_STREAM}")
- file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_STREAM}")
-endif (TEST_FILTER)
+# if the return value is !=0 bail out
+if (NOT ${TEST_RESULT} STREQUAL ${TEST_EXPECT})
+ message (STATUS "ERROR OUTPUT: ${TEST_STREAM}")
+ message (FATAL_ERROR "Failed: Test program ${TEST_PROGRAM} exited != 0.\n${TEST_ERROR}")
+endif (NOT ${TEST_RESULT} STREQUAL ${TEST_EXPECT})
+message (STATUS "COMMAND Error: ${TEST_ERROR}")
+
+# compare output files to references unless this must be skipped
if (NOT TEST_SKIP_COMPARE)
if (WIN32 AND NOT MINGW)
file (READ ${TEST_FOLDER}/${TEST_REFERENCE} TEST_STREAM)
@@ -123,21 +115,21 @@ if (NOT TEST_SKIP_COMPARE)
endif (WIN32 AND NOT MINGW)
# now compare the output with the reference
- EXECUTE_PROCESS (
+ execute_process (
COMMAND ${CMAKE_COMMAND} -E compare_files ${TEST_FOLDER}/${TEST_OUTPUT} ${TEST_FOLDER}/${TEST_REFERENCE}
RESULT_VARIABLE TEST_RESULT
)
if (NOT ${TEST_RESULT} STREQUAL 0)
set (TEST_RESULT 0)
file (STRINGS ${TEST_FOLDER}/${TEST_OUTPUT} test_act)
- LIST (LENGTH test_act len_act)
+ list (LENGTH test_act len_act)
file (STRINGS ${TEST_FOLDER}/${TEST_REFERENCE} test_ref)
- LIST (LENGTH test_ref len_ref)
+ list (LENGTH test_ref len_ref)
if (NOT ${len_act} STREQUAL "0")
MATH (EXPR _FP_LEN "${len_ref} - 1")
foreach (line RANGE 0 ${_FP_LEN})
- LIST (GET test_act ${line} str_act)
- LIST (GET test_ref ${line} str_ref)
+ list (GET test_act ${line} str_act)
+ list (GET test_ref ${line} str_ref)
if (NOT "${str_act}" STREQUAL "${str_ref}")
if (NOT "${str_act}" STREQUAL "")
set (TEST_RESULT 1)
@@ -157,7 +149,8 @@ if (NOT TEST_SKIP_COMPARE)
if (NOT ${TEST_RESULT} STREQUAL 0)
message (FATAL_ERROR "Failed: The output of ${TEST_OUTPUT} did not match ${TEST_REFERENCE}")
endif (NOT ${TEST_RESULT} STREQUAL 0)
-
+
+ # now compare the .err file with the error reference, if supplied
if (TEST_ERRREF)
if (WIN32 AND NOT MINGW)
file (READ ${TEST_FOLDER}/${TEST_ERRREF} TEST_STREAM)
@@ -165,22 +158,22 @@ if (NOT TEST_SKIP_COMPARE)
endif (WIN32 AND NOT MINGW)
# now compare the error output with the error reference
- EXECUTE_PROCESS (
+ execute_process (
COMMAND ${CMAKE_COMMAND} -E compare_files ${TEST_FOLDER}/${TEST_OUTPUT}.err ${TEST_FOLDER}/${TEST_ERRREF}
RESULT_VARIABLE TEST_RESULT
)
if (NOT ${TEST_RESULT} STREQUAL 0)
set (TEST_RESULT 0)
file (STRINGS ${TEST_FOLDER}/${TEST_OUTPUT}.err test_act)
- LIST (LENGTH test_act len_act)
+ list (LENGTH test_act len_act)
file (STRINGS ${TEST_FOLDER}/${TEST_ERRREF} test_ref)
- LIST (LENGTH test_ref len_ref)
+ list (LENGTH test_ref len_ref)
MATH (EXPR _FP_LEN "${len_ref} - 1")
if (NOT ${len_act} STREQUAL "0")
MATH (EXPR _FP_LEN "${len_ref} - 1")
foreach (line RANGE 0 ${_FP_LEN})
- LIST (GET test_act ${line} str_act)
- LIST (GET test_ref ${line} str_ref)
+ list (GET test_act ${line} str_act)
+ list (GET test_ref ${line} str_ref)
if (NOT "${str_act}" STREQUAL "${str_ref}")
if (NOT "${str_act}" STREQUAL "")
set (TEST_RESULT 1)
@@ -203,5 +196,27 @@ if (NOT TEST_SKIP_COMPARE)
endif (TEST_ERRREF)
endif (NOT TEST_SKIP_COMPARE)
+if (TEST_GREP_COMPARE)
+ # now grep the output with the reference
+ file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM)
+
+ # TEST_REFERENCE should always be matched
+ string (REGEX MATCH "${TEST_REFERENCE}" TEST_MATCH ${TEST_STREAM})
+ string (COMPARE EQUAL "${TEST_REFERENCE}" "${TEST_MATCH}" TEST_RESULT)
+ if (${TEST_RESULT} STREQUAL "0")
+ message (FATAL_ERROR "Failed: The output of ${TEST_PROGRAM} did not contain ${TEST_REFERENCE}")
+ endif (${TEST_RESULT} STREQUAL "0")
+
+ string (REGEX MATCH "${TEST_FILTER}" TEST_MATCH ${TEST_STREAM})
+ if (${TEST_EXPECT} STREQUAL "1")
+ # TEST_EXPECT (1) interprets TEST_FILTER as NOT to match
+ string (LENGTH "${TEST_MATCH}" TEST_RESULT)
+ if (NOT ${TEST_RESULT} STREQUAL "0")
+ message (FATAL_ERROR "Failed: The output of ${TEST_PROGRAM} did contain ${TEST_FILTER}")
+ endif (NOT ${TEST_RESULT} STREQUAL "0")
+ endif (${TEST_EXPECT} STREQUAL "1")
+endif (TEST_GREP_COMPARE)
+
# everything went fine...
-message ("Passed: The output of ${TEST_PROGRAM} matches ${TEST_REFERENCE}")
+message ("${TEST_PROGRAM} Passed")
+
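
As a usage note, a driver script such as the jrunTest.cmake copied above is normally attached to CTest through a cmake -D ... -P invocation. The sketch below is an assumed example only: the class name, jar, output/reference file names, and HDF_RESOURCES_DIR are placeholders; Java_JAVA_EXECUTABLE comes from CMake's FindJava module.

  add_test (NAME JAVA_example COMMAND "${CMAKE_COMMAND}"
      -D "TEST_TESTER=${Java_JAVA_EXECUTABLE}"
      -D "TEST_PROGRAM=examples.HDF4Example"
      -D "TEST_LIBRARY_DIRECTORY=${CMAKE_BINARY_DIR}/bin"
      -D "TEST_CLASSPATH:STRING=${CMAKE_BINARY_DIR}/HDF4Example.jar"
      -D "TEST_FOLDER=${CMAKE_BINARY_DIR}"
      -D "TEST_OUTPUT=HDF4Example.out"
      -D "TEST_REFERENCE=HDF4Example.txt"
      -D "TEST_EXPECT=0"
      -P "${HDF_RESOURCES_DIR}/jrunTest.cmake"
  )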
diff --git a/config/cmake/libhdf4.settings.cmake.in b/config/cmake/libhdf4.settings.cmake.in
index 733a2e7..9c5840d 100644
--- a/config/cmake/libhdf4.settings.cmake.in
+++ b/config/cmake/libhdf4.settings.cmake.in
@@ -19,7 +19,7 @@ Compiling Options:
CFLAGS: @CMAKE_C_FLAGS@
CPPFLAGS: @CMAKE_CXX_FLAGS@
Shared Libraries: @H4_ENABLE_SHARED_LIB@
- Static Libraries: @H4_ENABLE_STATIC_LIB@
+ Static Libraries: YES
Statically Linked Executables: @BUILD_STATIC_EXECS@
LDFLAGS: @CMAKE_SHARED_LINKER_FLAGS@
Extra libraries: @LINK_LIBS@
@@ -32,6 +32,9 @@ Languages:
@BUILD_FORTRAN_CONDITIONAL_TRUE@ Fortran Compiler: @CMAKE_Fortran_COMPILER@
@BUILD_FORTRAN_CONDITIONAL_TRUE@ Fortran Flags: @CMAKE_Fortran_FLAGS@
+ JAVA: @HDF4_BUILD_JAVA@
+@BUILD_JAVA_CONDITIONAL_TRUE@ JAVA Compiler: @CMAKE_Java_COMPILER@ @Java_VERSION@
+
Features:
---------
SZIP compression: @SZIP_INFO@
diff --git a/config/cmake/patch.xml b/config/cmake/patch.xml
new file mode 100644
index 0000000..cc086dc
--- /dev/null
+++ b/config/cmake/patch.xml
@@ -0,0 +1,11 @@
+<CPackWiXPatch>
+ <CPackWiXFragment Id="CM_CP_libraries.bin.hdf.dll">
+ <Environment Id="PATH"
+ Name="PATH"
+ Value="[CM_DP_libraries.bin]"
+ Permanent="no"
+ Part="last"
+ Action="set"
+ System="yes"/>
+ </CPackWiXFragment>
+</CPackWiXPatch>
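
The WiX fragment above only takes effect if CPack is pointed at it; a minimal hook, assuming the file lives with the other cmake resources, would be:

  # CPACK_WIX_PATCH_FILE is the standard CPack/WiX variable for extra fragments;
  # the HDF_RESOURCES_DIR path here is illustrative.
  set (CPACK_WIX_PATCH_FILE "${HDF_RESOURCES_DIR}/patch.xml")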
diff --git a/config/cmake_ext_mod/CPack.Info.plist.in b/config/cmake_ext_mod/CPack.Info.plist.in
deleted file mode 100644
index 6cf2a94..0000000
--- a/config/cmake_ext_mod/CPack.Info.plist.in
+++ /dev/null
@@ -1,32 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
- <key>CFBundleDevelopmentRegion</key>
- <string>English</string>
- <key>CFBundleExecutable</key>
- <string>@CPACK_PACKAGE_FILE_NAME@</string>
- <key>CFBundleIconFile</key>
- <string>@CPACK_BUNDLE_ICON@</string>
- <key>CFBundleIdentifier</key>
- <string>org.@CPACK_PACKAGE_VENDOR@.@CPACK_PACKAGE_NAME@@CPACK_MODULE_VERSION_SUFFIX@</string>
- <key>CFBundleInfoDictionaryVersion</key>
- <string>6.0</string>
- <key>CFBundlePackageType</key>
- <string>FMWK</string>
- <key>CFBundleSignature</key>
- <string>????</string>
- <key>CFBundleVersion</key>
- <string>@CPACK_PACKAGE_VERSIO@</string>
- <key>CFBundleShortVersionString</key>
- <string>@CPACK_SHORT_VERSION_STRING@</string>
- <key>CSResourcesFileMapped</key>
- <true/>
- <key>CFBundleName</key>
- <string>@CPACK_BUNDLE_NAME@</string>
- <key>CFBundleGetInfoString</key>
- <string>@CPACK_APPLE_GUI_INFO_STRING@</string>
- <key>NSHumanReadableCopyright</key>
- <string>@CPACK_APPLE_GUI_COPYRIGHT@</string>
-</dict>
-</plist>
diff --git a/config/cmake_ext_mod/ConfigureChecks.cmake b/config/cmake_ext_mod/ConfigureChecks.cmake
index d715383..c4fabf1 100644
--- a/config/cmake_ext_mod/ConfigureChecks.cmake
+++ b/config/cmake_ext_mod/ConfigureChecks.cmake
@@ -11,7 +11,9 @@ include (${CMAKE_ROOT}/Modules/CheckTypeSize.cmake)
include (${CMAKE_ROOT}/Modules/CheckVariableExists.cmake)
include (${CMAKE_ROOT}/Modules/CheckFortranFunctionExists.cmake)
include (${CMAKE_ROOT}/Modules/TestBigEndian.cmake)
-include (${CMAKE_ROOT}/Modules/TestForSTDNamespace.cmake)
+if(CMAKE_CXX_COMPILER)
+ include (${CMAKE_ROOT}/Modules/TestForSTDNamespace.cmake)
+endif(CMAKE_CXX_COMPILER)
#-----------------------------------------------------------------------------
# APPLE/Darwin setup
@@ -29,13 +31,13 @@ if (APPLE)
endif (APPLE)
# Check for Darwin (not just Apple - we also want to catch OpenDarwin)
-if (${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
- set (${HDF_PREFIX}_HAVE_DARWIN 1)
+if (${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
+ set (${HDF_PREFIX}_HAVE_DARWIN 1)
endif (${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
# Check for Solaris
-if (${CMAKE_SYSTEM_NAME} MATCHES "SunOS")
- set (${HDF_PREFIX}_HAVE_SOLARIS 1)
+if (${CMAKE_SYSTEM_NAME} MATCHES "SunOS")
+ set (${HDF_PREFIX}_HAVE_SOLARIS 1)
endif (${CMAKE_SYSTEM_NAME} MATCHES "SunOS")
#-----------------------------------------------------------------------------
@@ -90,7 +92,6 @@ if (WINDOWS)
set (${HDF_PREFIX}_GETTIMEOFDAY_GIVES_TZ 1)
set (${HDF_PREFIX}_HAVE_TIMEZONE 1)
set (${HDF_PREFIX}_HAVE_GETTIMEOFDAY 1)
- set (${HDF_PREFIX}_LONE_COLON 0)
if (MINGW)
set (${HDF_PREFIX}_HAVE_WINSOCK2_H 1)
endif (MINGW)
@@ -116,13 +117,8 @@ if (NOT WINDOWS)
CHECK_LIBRARY_EXISTS_CONCAT ("wsock32" gethostbyname ${HDF_PREFIX}_HAVE_LIBWSOCK32)
endif (NOT WINDOWS)
+# UCB (BSD) compatibility library
CHECK_LIBRARY_EXISTS_CONCAT ("ucb" gethostname ${HDF_PREFIX}_HAVE_LIBUCB)
-CHECK_LIBRARY_EXISTS_CONCAT ("socket" connect ${HDF_PREFIX}_HAVE_LIBSOCKET)
-CHECK_LIBRARY_EXISTS ("c" gethostbyname "" NOT_NEED_LIBNSL)
-
-if (NOT NOT_NEED_LIBNSL)
- CHECK_LIBRARY_EXISTS_CONCAT ("nsl" gethostbyname ${HDF_PREFIX}_HAVE_LIBNSL)
-endif (NOT NOT_NEED_LIBNSL)
# For other tests to use the same libraries
set (CMAKE_REQUIRED_LIBRARIES ${CMAKE_REQUIRED_LIBRARIES} ${LINK_LIBS})
@@ -145,10 +141,6 @@ MACRO (HDF_FUNCTION_TEST OTHER_TEST)
set (OTHER_TEST_ADD_LIBRARIES "-DLINK_LIBRARIES:STRING=${CMAKE_REQUIRED_LIBRARIES}")
endif (CMAKE_REQUIRED_LIBRARIES)
- foreach (def ${HDF_EXTRA_TEST_DEFINITIONS})
- set (MACRO_CHECK_FUNCTION_DEFINITIONS "${MACRO_CHECK_FUNCTION_DEFINITIONS} -D${def}=${${def}}")
- endforeach (def)
-
foreach (def
HAVE_SYS_TIME_H
HAVE_UNISTD_H
@@ -180,7 +172,7 @@ MACRO (HDF_FUNCTION_TEST OTHER_TEST)
else (${OTHER_TEST})
message (STATUS "Performing Other Test ${OTHER_TEST} - Failed")
set (${HDF_PREFIX}_${OTHER_TEST} "" CACHE INTERNAL "Other test ${FUNCTION}")
- file (APPEND ${CMAKE_BINARY_DIR}/CMakeFiles/CMakeError.log
+ file (APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeError.log
"Performing Other Test ${OTHER_TEST} failed with the following output:\n"
"${OUTPUT}\n"
)
@@ -254,7 +246,6 @@ CHECK_INCLUDE_FILE_CONCAT ("pthread.h" ${HDF_PREFIX}_HAVE_PTHREAD_H)
CHECK_INCLUDE_FILE_CONCAT ("srbclient.h" ${HDF_PREFIX}_HAVE_SRBCLIENT_H)
CHECK_INCLUDE_FILE_CONCAT ("string.h" ${HDF_PREFIX}_HAVE_STRING_H)
CHECK_INCLUDE_FILE_CONCAT ("strings.h" ${HDF_PREFIX}_HAVE_STRINGS_H)
-CHECK_INCLUDE_FILE_CONCAT ("time.h" ${HDF_PREFIX}_HAVE_TIME_H)
CHECK_INCLUDE_FILE_CONCAT ("stdlib.h" ${HDF_PREFIX}_HAVE_STDLIB_H)
CHECK_INCLUDE_FILE_CONCAT ("memory.h" ${HDF_PREFIX}_HAVE_MEMORY_H)
CHECK_INCLUDE_FILE_CONCAT ("dlfcn.h" ${HDF_PREFIX}_HAVE_DLFCN_H)
@@ -271,17 +262,24 @@ set (LINUX_LFS 0)
set (HDF_EXTRA_C_FLAGS)
set (HDF_EXTRA_FLAGS)
if (NOT WINDOWS)
- if (NOT ${HDF_PREFIX}_HAVE_SOLARIS)
+ # Might want to check explicitly for Linux and possibly Cygwin
+ # instead of checking for not Solaris or Darwin.
+ if (NOT ${HDF_PREFIX}_HAVE_SOLARIS AND NOT ${HDF_PREFIX}_HAVE_DARWIN)
# Linux Specific flags
# This was originally defined as _POSIX_SOURCE which was updated to
# _POSIX_C_SOURCE=199506L to expose a greater amount of POSIX
# functionality so clock_gettime and CLOCK_MONOTONIC are defined
- # correctly.
+ # correctly. This was later updated to 200112L so that
+ # posix_memalign() is visible for the direct VFD code on Linux
+ # systems.
# POSIX feature information can be found in the gcc manual at:
# http://www.gnu.org/s/libc/manual/html_node/Feature-Test-Macros.html
- set (HDF_EXTRA_C_FLAGS -D_POSIX_C_SOURCE=199506L)
- set (HDF_EXTRA_FLAGS -D_BSD_SOURCE)
-
+ set (HDF_EXTRA_C_FLAGS -D_POSIX_C_SOURCE=200112L)
+
+ # Need to add this so that O_DIRECT is visible for the direct
+ # VFD on Linux systems.
+ set (HDF_EXTRA_C_FLAGS -D_GNU_SOURCE)
+
option (HDF_ENABLE_LARGE_FILE "Enable support for large (64-bit) files on Linux." ON)
if (HDF_ENABLE_LARGE_FILE)
set (msg "Performing TEST_LFS_WORKS")
@@ -291,6 +289,11 @@ if (NOT WINDOWS)
CMAKE_FLAGS -DCOMPILE_DEFINITIONS:STRING=-DTEST_LFS_WORKS
OUTPUT_VARIABLE OUTPUT
)
+
+ # The LARGEFILE definitions were from the transition period
+ # and are probably no longer needed. The FILE_OFFSET_BITS
+ # check should be generalized for all POSIX systems as it
+ # is in the Autotools.
if (TEST_LFS_WORKS_COMPILE)
if (TEST_LFS_WORKS_RUN MATCHES 0)
set (TEST_LFS_WORKS 1 CACHE INTERNAL ${msg})
@@ -300,20 +303,20 @@ if (NOT WINDOWS)
else (TEST_LFS_WORKS_RUN MATCHES 0)
set (TEST_LFS_WORKS "" CACHE INTERNAL ${msg})
message (STATUS "${msg}... no")
- file (APPEND ${CMAKE_BINARY_DIR}/CMakeFiles/CMakeError.log
+ file (APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeError.log
"Test TEST_LFS_WORKS Run failed with the following output and exit code:\n ${OUTPUT}\n"
)
endif (TEST_LFS_WORKS_RUN MATCHES 0)
else (TEST_LFS_WORKS_COMPILE )
set (TEST_LFS_WORKS "" CACHE INTERNAL ${msg})
message (STATUS "${msg}... no")
- file (APPEND ${CMAKE_BINARY_DIR}/CMakeFiles/CMakeError.log
+ file (APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeError.log
"Test TEST_LFS_WORKS Compile failed with the following output:\n ${OUTPUT}\n"
)
endif (TEST_LFS_WORKS_COMPILE)
endif (HDF_ENABLE_LARGE_FILE)
set (CMAKE_REQUIRED_DEFINITIONS ${CMAKE_REQUIRED_DEFINITIONS} ${HDF_EXTRA_FLAGS})
- endif (NOT ${HDF_PREFIX}_HAVE_SOLARIS)
+ endif (NOT ${HDF_PREFIX}_HAVE_SOLARIS AND NOT ${HDF_PREFIX}_HAVE_DARWIN)
endif (NOT WINDOWS)
add_definitions (${HDF_EXTRA_FLAGS})
@@ -416,6 +419,19 @@ if (NOT ${HDF_PREFIX}_SIZEOF_OFF64_T)
set (${HDF_PREFIX}_SIZEOF_OFF64_T 0)
endif (NOT ${HDF_PREFIX}_SIZEOF_OFF64_T)
+#-----------------------------------------------------------------------------
+# Extra C99 types
+#-----------------------------------------------------------------------------
+
+# _Bool type support
+CHECK_INCLUDE_FILE_CONCAT (stdbool.h ${HDF_PREFIX}_HAVE_STDBOOL_H)
+if (HAVE_STDBOOL_H)
+ set (CMAKE_EXTRA_INCLUDE_FILES stdbool.h)
+ HDF_CHECK_TYPE_SIZE (bool ${HDF_PREFIX}_SIZEOF_BOOL)
+else (HAVE_STDBOOL_H)
+ HDF_CHECK_TYPE_SIZE (_Bool ${HDF_PREFIX}_SIZEOF_BOOL)
+endif (HAVE_STDBOOL_H)
+
if (NOT WINDOWS)
#-----------------------------------------------------------------------------
# Check if the dev_t type is a scalar type
@@ -431,6 +447,7 @@ if (NOT WINDOWS)
#-----------------------------------------------------------------------------
# Check a bunch of time functions
#-----------------------------------------------------------------------------
+ CHECK_FUNCTION_EXISTS (gettimeofday ${HDF_PREFIX}_HAVE_GETTIMEOFDAY)
foreach (test
HAVE_TM_GMTOFF
HAVE___TM_GMTOFF
@@ -491,7 +508,6 @@ CHECK_FUNCTION_EXISTS (longjmp ${HDF_PREFIX}_HAVE_LONGJMP)
CHECK_FUNCTION_EXISTS (setjmp ${HDF_PREFIX}_HAVE_SETJMP)
CHECK_FUNCTION_EXISTS (siglongjmp ${HDF_PREFIX}_HAVE_SIGLONGJMP)
CHECK_FUNCTION_EXISTS (sigsetjmp ${HDF_PREFIX}_HAVE_SIGSETJMP)
-CHECK_FUNCTION_EXISTS (sigaction ${HDF_PREFIX}_HAVE_SIGACTION)
CHECK_FUNCTION_EXISTS (sigprocmask ${HDF_PREFIX}_HAVE_SIGPROCMASK)
CHECK_FUNCTION_EXISTS (snprintf ${HDF_PREFIX}_HAVE_SNPRINTF)
@@ -525,15 +541,10 @@ if (NOT ${HDF_PREFIX}_HAVE_SIGSETJMP)
endif (NOT ${HDF_PREFIX}_HAVE_SIGSETJMP)
#-----------------------------------------------------------------------------
-# Check for Symbols
-CHECK_SYMBOL_EXISTS (tzname "time.h" ${HDF_PREFIX}_HAVE_DECL_TZNAME)
-
-#-----------------------------------------------------------------------------
# Check a bunch of other functions
#-----------------------------------------------------------------------------
if (NOT WINDOWS)
foreach (test
- LONE_COLON
HAVE_ATTRIBUTE
HAVE_C99_FUNC
# STDC_HEADERS
@@ -556,10 +567,6 @@ MACRO (HDF_CXX_FUNCTION_TEST OTHER_TEST)
set (OTHER_TEST_ADD_LIBRARIES "-DLINK_LIBRARIES:STRING=${CMAKE_REQUIRED_LIBRARIES}")
endif (CMAKE_REQUIRED_LIBRARIES)
- foreach (def ${HDF_EXTRA_TEST_DEFINITIONS})
- set (MACRO_CHECK_FUNCTION_DEFINITIONS "${MACRO_CHECK_FUNCTION_DEFINITIONS} -D${def}=${${def}}")
- endforeach (def)
-
foreach (def
HAVE_SYS_TIME_H
HAVE_UNISTD_H
@@ -626,7 +633,7 @@ if (WINDOWS)
"${CURRENT_TEST_DEFINITIONS} -D_FILE_OFFSET_BITS=64 -D_LARGEFILE64_SOURCE -D_LARGEFILE_SOURCE"
)
endif (LARGEFILE)
- set (MACRO_CHECK_FUNCTION_DEFINITIONS
+ set (MACRO_CHECK_FUNCTION_DEFINITIONS
"-DHAVE_IOEO ${CMAKE_REQUIRED_FLAGS}")
if (CMAKE_REQUIRED_LIBRARIES)
set (CHECK_C_SOURCE_COMPILES_ADD_LIBRARIES
@@ -658,7 +665,7 @@ if (WINDOWS)
if ("${HAVE_IOEO_EXITCODE}" EQUAL 0)
set (${HDF_PREFIX}_HAVE_IOEO 1 CACHE INTERNAL "Test InitOnceExecuteOnce")
message (STATUS "Performing Test InitOnceExecuteOnce - Success")
- file (APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeOutput.log
+ file (APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeOutput.log
"Performing C SOURCE FILE Test InitOnceExecuteOnce succeded with the following output:\n"
"${OUTPUT}\n"
"Return value: ${HAVE_IOEO}\n")
@@ -670,7 +677,7 @@ if (WINDOWS)
endif (CMAKE_CROSSCOMPILING AND "${HAVE_IOEO_EXITCODE}" MATCHES "FAILED_TO_RUN")
message (STATUS "Performing Test InitOnceExecuteOnce - Failed")
- file (APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeError.log
+ file (APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeError.log
"Performing InitOnceExecuteOnce Test failed with the following output:\n"
"${OUTPUT}\n"
"Return value: ${HAVE_IOEO_EXITCODE}\n")
@@ -682,60 +689,45 @@ endif (WINDOWS)
#-----------------------------------------------------------------------------
# Determine how 'inline' is used
#-----------------------------------------------------------------------------
-set (HDF_EXTRA_TEST_DEFINITIONS INLINE_TEST_INLINE)
foreach (inline_test inline __inline__ __inline)
- set (INLINE_TEST_INLINE ${inline_test})
- HDF_FUNCTION_TEST (INLINE_TEST_${inline_test})
+ string (TOUPPER ${inline_test} INLINE_TEST_MACRO)
+ HDF_FUNCTION_TEST (HAVE_${INLINE_TEST_MACRO})
endforeach (inline_test)
-set (HDF_EXTRA_TEST_DEFINITIONS)
-if (INLINE_TEST___inline__)
- set (${HDF_PREFIX}_inline __inline__)
-else (INLINE_TEST___inline__)
- if (INLINE_TEST___inline)
- set (${HDF_PREFIX}_inline __inline)
- else (INLINE_TEST___inline)
- if (INLINE_TEST_inline)
- set (${HDF_PREFIX}_inline inline)
- endif (INLINE_TEST_inline)
- endif (INLINE_TEST___inline)
-endif (INLINE_TEST___inline__)
-
#-----------------------------------------------------------------------------
# Check how to print a Long Long integer
#-----------------------------------------------------------------------------
if (NOT ${HDF_PREFIX}_PRINTF_LL_WIDTH OR ${HDF_PREFIX}_PRINTF_LL_WIDTH MATCHES "unknown")
set (PRINT_LL_FOUND 0)
message (STATUS "Checking for appropriate format for 64 bit long:")
- foreach (HDF5_PRINTF_LL l64 l L q I64 ll)
- set (CURRENT_TEST_DEFINITIONS "-DPRINTF_LL_WIDTH=${HDF5_PRINTF_LL}")
- if (${HDF_PREFIX}_SIZEOF_LONG_LONG)
- set (CURRENT_TEST_DEFINITIONS "${CURRENT_TEST_DEFINITIONS} -DHAVE_LONG_LONG")
- endif (${HDF_PREFIX}_SIZEOF_LONG_LONG)
- TRY_RUN (HDF5_PRINTF_LL_TEST_RUN HDF5_PRINTF_LL_TEST_COMPILE
- ${CMAKE_BINARY_DIR}
- ${HDF_RESOURCES_EXT_DIR}/HDFTests.c
- CMAKE_FLAGS -DCOMPILE_DEFINITIONS:STRING=${CURRENT_TEST_DEFINITIONS}
- OUTPUT_VARIABLE OUTPUT
+ set (CURRENT_TEST_DEFINITIONS "-DPRINTF_LL_WIDTH")
+ if (${HDF_PREFIX}_SIZEOF_LONG_LONG)
+ set (CURRENT_TEST_DEFINITIONS "${CURRENT_TEST_DEFINITIONS} -DHAVE_LONG_LONG")
+ endif (${HDF_PREFIX}_SIZEOF_LONG_LONG)
+ TRY_RUN (${HDF_PREFIX}_PRINTF_LL_TEST_RUN ${HDF_PREFIX}_PRINTF_LL_TEST_COMPILE
+ ${CMAKE_BINARY_DIR}
+ ${HDF_RESOURCES_EXT_DIR}/HDFTests.c
+ CMAKE_FLAGS -DCOMPILE_DEFINITIONS:STRING=${CURRENT_TEST_DEFINITIONS}
+ OUTPUT_VARIABLE OUTPUT
+ )
+ if (${HDF_PREFIX}_PRINTF_LL_TEST_COMPILE)
+ if (${HDF_PREFIX}_PRINTF_LL_TEST_RUN MATCHES 0)
+ string(REGEX REPLACE ".*PRINTF_LL_WIDTH=\\[(.*)\\].*" "\\1" ${HDF_PREFIX}_PRINTF_LL "${OUTPUT}")
+ set (${HDF_PREFIX}_PRINTF_LL_WIDTH "\"${${HDF_PREFIX}_PRINTF_LL}\"" CACHE INTERNAL "Width for printf for type `long long' or `__int64', us. `ll")
+ set (PRINT_LL_FOUND 1)
+ else (${HDF_PREFIX}_PRINTF_LL_TEST_RUN MATCHES 0)
+ message ("Width test failed with result: ${${HDF_PREFIX}_PRINTF_LL_TEST_RUN}")
+ endif (${HDF_PREFIX}_PRINTF_LL_TEST_RUN MATCHES 0)
+ else (${HDF_PREFIX}_PRINTF_LL_TEST_COMPILE)
+ file (APPEND ${CMAKE_BINARY_DIR}/CMakeFiles/CMakeError.log
+ "Test ${HDF_PREFIX}_PRINTF_LL_WIDTH failed with the following output:\n ${OUTPUT}\n"
)
- if (HDF5_PRINTF_LL_TEST_COMPILE)
- if (HDF5_PRINTF_LL_TEST_RUN MATCHES 0)
- set (${HDF_PREFIX}_PRINTF_LL_WIDTH "\"${HDF5_PRINTF_LL}\"" CACHE INTERNAL "Width for printf for type `long long' or `__int64', us. `ll")
- set (PRINT_LL_FOUND 1)
- else (HDF5_PRINTF_LL_TEST_RUN MATCHES 0)
- message ("Width with ${HDF5_PRINTF_LL} failed with result: ${HDF5_PRINTF_LL_TEST_RUN}")
- endif (HDF5_PRINTF_LL_TEST_RUN MATCHES 0)
- else (HDF5_PRINTF_LL_TEST_COMPILE)
- file (APPEND ${CMAKE_BINARY_DIR}/CMakeFiles/CMakeError.log
- "Test ${HDF_PREFIX}_PRINTF_LL_WIDTH for ${HDF5_PRINTF_LL} failed with the following output:\n ${OUTPUT}\n"
- )
- endif (HDF5_PRINTF_LL_TEST_COMPILE)
- endforeach (HDF5_PRINTF_LL)
+ endif (${HDF_PREFIX}_PRINTF_LL_TEST_COMPILE)
if (PRINT_LL_FOUND)
- message (STATUS "Checking for apropriate format for 64 bit long: found ${${HDF_PREFIX}_PRINTF_LL_WIDTH}")
+ message (STATUS "Checking for appropriate format for 64 bit long: found ${${HDF_PREFIX}_PRINTF_LL_WIDTH}")
else (PRINT_LL_FOUND)
- message (STATUS "Checking for apropriate format for 64 bit long: not found")
+ message (STATUS "Checking for appropriate format for 64 bit long: not found")
set (${HDF_PREFIX}_PRINTF_LL_WIDTH "\"unknown\"" CACHE INTERNAL
"Width for printf for type `long long' or `__int64', us. `ll"
)
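
The rewritten check above replaces the old per-format loop with a single probe program that reports its own answer. Reduced to a standalone sketch (with probe.c standing in for HDFTests.c, which prints PRINTF_LL_WIDTH=[<modifier>] when built with -DPRINTF_LL_WIDTH), the pattern is roughly:

  try_run (PROBE_RUN PROBE_COMPILE
      ${CMAKE_BINARY_DIR}
      ${CMAKE_CURRENT_SOURCE_DIR}/probe.c
      CMAKE_FLAGS -DCOMPILE_DEFINITIONS:STRING=-DPRINTF_LL_WIDTH
      RUN_OUTPUT_VARIABLE PROBE_OUTPUT
  )
  if (PROBE_COMPILE AND PROBE_RUN MATCHES 0)
    # Extract the modifier (ll, l, q, I64, ...) that the probe settled on.
    string (REGEX REPLACE ".*PRINTF_LL_WIDTH=\\[(.*)\\].*" "\\1" LL_FMT "${PROBE_OUTPUT}")
    message (STATUS "printf modifier for 64-bit integers: ${LL_FMT}")
  endif ()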
diff --git a/config/cmake_ext_mod/FindSZIP.cmake b/config/cmake_ext_mod/FindSZIP.cmake
index e8e7f9f..5f0f031 100644
--- a/config/cmake_ext_mod/FindSZIP.cmake
+++ b/config/cmake_ext_mod/FindSZIP.cmake
@@ -156,12 +156,6 @@ if (SZIP_FOUND)
# Add SZIP_INCLUDE_DIR to CMAKE_REQUIRED_INCLUDES
set (CMAKE_REQUIRED_INCLUDES "${CMAKE_REQUIRED_INCLUDES};${SZIP_INCLUDE_DIRS}")
- CHECK_SYMBOL_EXISTS (SZIP_BUILT_AS_DYNAMIC_LIB "SZconfig.h" HAVE_SZIP_DLL)
-
- if (HAVE_SZIP_DLL STREQUAL "TRUE")
- set (HAVE_SZIP_DLL "1")
- endif (HAVE_SZIP_DLL STREQUAL "TRUE")
-
# Restore CMAKE_REQUIRED_INCLUDES and CMAKE_REQUIRED_FLAGS variables
set (CMAKE_REQUIRED_INCLUDES ${CMAKE_REQUIRED_INCLUDES_SAVE})
set (CMAKE_REQUIRED_FLAGS ${CMAKE_REQUIRED_FLAGS_SAVE})
@@ -174,6 +168,5 @@ if (FIND_SZIP_DEBUG)
message (STATUS "SZIP_INCLUDE_DIRS: ${SZIP_INCLUDE_DIRS}")
message (STATUS "SZIP_LIBRARY_DEBUG: ${SZIP_LIBRARY_DEBUG}")
message (STATUS "SZIP_LIBRARY_RELEASE: ${SZIP_LIBRARY_RELEASE}")
- message (STATUS "HAVE_SZIP_DLL: ${HAVE_SZIP_DLL}")
message (STATUS "CMAKE_BUILD_TYPE: ${CMAKE_BUILD_TYPE}")
endif (FIND_SZIP_DEBUG)
diff --git a/config/cmake_ext_mod/HDFLibMacros.cmake b/config/cmake_ext_mod/HDFLibMacros.cmake
index f499b00..3f91405 100644
--- a/config/cmake_ext_mod/HDFLibMacros.cmake
+++ b/config/cmake_ext_mod/HDFLibMacros.cmake
@@ -1,16 +1,16 @@
#-------------------------------------------------------------------------------
-MACRO (EXTERNAL_JPEG_LIBRARY compress_type libtype jpeg_pic)
+macro (EXTERNAL_JPEG_LIBRARY compress_type jpeg_pic)
# May need to build JPEG with PIC on x64 machines with gcc
# Need to use CMAKE_ANSI_CFLAGS define so that compiler test works
if (${compress_type} MATCHES "SVN")
EXTERNALPROJECT_ADD (JPEG
SVN_REPOSITORY ${JPEG_URL}
- # [SVN_REVISION rev]
+ # [SVN_REVISION rev]
INSTALL_COMMAND ""
CMAKE_ARGS
-DBUILD_SHARED_LIBS:BOOL=${BUILD_SHARED_LIBS}
- -DHDF_PACKAGE_EXT:STRING=${HDF_PACKAGE_EXT}
+ -DJPEG_PACKAGE_EXT:STRING=${HDF_PACKAGE_EXT}
-DJPEG_EXTERNALLY_CONFIGURED:BOOL=OFF
-DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE}
-DCMAKE_INSTALL_PREFIX:PATH=${CMAKE_INSTALL_PREFIX}
@@ -18,14 +18,14 @@ MACRO (EXTERNAL_JPEG_LIBRARY compress_type libtype jpeg_pic)
-DCMAKE_LIBRARY_OUTPUT_DIRECTORY:PATH=${CMAKE_LIBRARY_OUTPUT_DIRECTORY}
-DCMAKE_ARCHIVE_OUTPUT_DIRECTORY:PATH=${CMAKE_ARCHIVE_OUTPUT_DIRECTORY}
-DCMAKE_ANSI_CFLAGS:STRING=${jpeg_pic}
- )
+ )
elseif (${compress_type} MATCHES "GIT")
EXTERNALPROJECT_ADD (JPEG
GIT_REPOSITORY ${JPEG_URL}
INSTALL_COMMAND ""
CMAKE_ARGS
-DBUILD_SHARED_LIBS:BOOL=${BUILD_SHARED_LIBS}
- -DHDF_PACKAGE_EXT:STRING=${HDF_PACKAGE_EXT}
+ -DJPEG_PACKAGE_EXT:STRING=${HDF_PACKAGE_EXT}
-DJPEG_EXTERNALLY_CONFIGURED:BOOL=OFF
-DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE}
-DCMAKE_INSTALL_PREFIX:PATH=${CMAKE_INSTALL_PREFIX}
@@ -33,7 +33,7 @@ MACRO (EXTERNAL_JPEG_LIBRARY compress_type libtype jpeg_pic)
-DCMAKE_LIBRARY_OUTPUT_DIRECTORY:PATH=${CMAKE_LIBRARY_OUTPUT_DIRECTORY}
-DCMAKE_ARCHIVE_OUTPUT_DIRECTORY:PATH=${CMAKE_ARCHIVE_OUTPUT_DIRECTORY}
-DCMAKE_ANSI_CFLAGS:STRING=${jpeg_pic}
- )
+ )
elseif (${compress_type} MATCHES "TGZ")
EXTERNALPROJECT_ADD (JPEG
URL ${JPEG_URL}
@@ -41,7 +41,7 @@ MACRO (EXTERNAL_JPEG_LIBRARY compress_type libtype jpeg_pic)
INSTALL_COMMAND ""
CMAKE_ARGS
-DBUILD_SHARED_LIBS:BOOL=${BUILD_SHARED_LIBS}
- -DHDF_PACKAGE_EXT:STRING=${HDF_PACKAGE_EXT}
+ -DJPEG_PACKAGE_EXT:STRING=${HDF_PACKAGE_EXT}
-DJPEG_EXTERNALLY_CONFIGURED:BOOL=OFF
-DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE}
-DCMAKE_INSTALL_PREFIX:PATH=${CMAKE_INSTALL_PREFIX}
@@ -49,27 +49,34 @@ MACRO (EXTERNAL_JPEG_LIBRARY compress_type libtype jpeg_pic)
-DCMAKE_LIBRARY_OUTPUT_DIRECTORY:PATH=${CMAKE_LIBRARY_OUTPUT_DIRECTORY}
-DCMAKE_ARCHIVE_OUTPUT_DIRECTORY:PATH=${CMAKE_ARCHIVE_OUTPUT_DIRECTORY}
-DCMAKE_ANSI_CFLAGS:STRING=${jpeg_pic}
- )
+ )
endif (${compress_type} MATCHES "SVN")
- externalproject_get_property (JPEG BINARY_DIR SOURCE_DIR)
+ externalproject_get_property (JPEG BINARY_DIR SOURCE_DIR)
- # Create imported target szip
- add_library (jpeg ${libtype} IMPORTED)
- HDF_IMPORT_SET_LIB_OPTIONS (jpeg "jpeg" ${libtype} "")
- add_dependencies (JPEG jpeg)
+##include (${BINARY_DIR}/${JPEG_PACKAGE_NAME}${HDF_PACKAGE_EXT}-targets.cmake)
+# Create imported target jpeg-static
+ add_library(jpeg-static STATIC IMPORTED)
+ HDF_IMPORT_SET_LIB_OPTIONS (jpeg-static "jpeg" STATIC "")
+ add_dependencies (JPEG jpeg-static)
+ set (JPEG_STATIC_LIBRARY "jpeg-static")
+ set (JPEG_LIBRARIES ${JPEG_STATIC_LIBRARY})
+ if (BUILD_SHARED_LIBS)
+ # Create imported target jpeg-shared
+ add_library(jpeg-shared SHARED IMPORTED)
+ HDF_IMPORT_SET_LIB_OPTIONS (jpeg-shared "jpeg" SHARED "")
+ add_dependencies (JPEG jpeg-shared)
+ set (JPEG_SHARED_LIBRARY "jpeg-shared")
+ set (JPEG_LIBRARIES ${JPEG_LIBRARIES} ${JPEG_SHARED_LIBRARY})
+ endif (BUILD_SHARED_LIBS)
-# include (${BINARY_DIR}/JPEG-targets.cmake)
- set (JPEG_LIBRARY "jpeg")
-
set (JPEG_INCLUDE_DIR_GEN "${BINARY_DIR}")
set (JPEG_INCLUDE_DIR "${SOURCE_DIR}/src")
set (JPEG_FOUND 1)
- set (JPEG_LIBRARIES ${JPEG_LIBRARY})
set (JPEG_INCLUDE_DIRS ${JPEG_INCLUDE_DIR_GEN} ${JPEG_INCLUDE_DIR})
-ENDMACRO (EXTERNAL_JPEG_LIBRARY)
+endmacro (EXTERNAL_JPEG_LIBRARY)
#-------------------------------------------------------------------------------
-MACRO (PACKAGE_JPEG_LIBRARY compress_type)
+macro (PACKAGE_JPEG_LIBRARY compress_type)
add_custom_target (JPEG-GenHeader-Copy ALL
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${JPEG_INCLUDE_DIR_GEN}/jconfig.h ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/
COMMENT "Copying ${JPEG_INCLUDE_DIR_GEN}/jconfig.h to ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/"
@@ -78,18 +85,18 @@ MACRO (PACKAGE_JPEG_LIBRARY compress_type)
if (${compress_type} MATCHES "SVN" OR ${compress_type} MATCHES "TGZ")
add_dependencies (JPEG-GenHeader-Copy JPEG)
endif (${compress_type} MATCHES "SVN" OR ${compress_type} MATCHES "TGZ")
-ENDMACRO (PACKAGE_JPEG_LIBRARY)
+endmacro (PACKAGE_JPEG_LIBRARY)
#-------------------------------------------------------------------------------
-MACRO (EXTERNAL_SZIP_LIBRARY compress_type libtype encoding)
+macro (EXTERNAL_SZIP_LIBRARY compress_type encoding)
if (${compress_type} MATCHES "SVN")
EXTERNALPROJECT_ADD (SZIP
SVN_REPOSITORY ${SZIP_URL}
- # [SVN_REVISION rev]
+ # [SVN_REVISION rev]
INSTALL_COMMAND ""
CMAKE_ARGS
-DBUILD_SHARED_LIBS:BOOL=${BUILD_SHARED_LIBS}
- -DHDF_PACKAGE_EXT:STRING=${HDF_PACKAGE_EXT}
+ -DSZIP_PACKAGE_EXT:STRING=${HDF_PACKAGE_EXT}
-DSZIP_EXTERNALLY_CONFIGURED:BOOL=OFF
-DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE}
-DCMAKE_INSTALL_PREFIX:PATH=${CMAKE_INSTALL_PREFIX}
@@ -98,15 +105,15 @@ MACRO (EXTERNAL_SZIP_LIBRARY compress_type libtype encoding)
-DCMAKE_ARCHIVE_OUTPUT_DIRECTORY:PATH=${CMAKE_ARCHIVE_OUTPUT_DIRECTORY}
-DCMAKE_ANSI_CFLAGS:STRING=${CMAKE_ANSI_CFLAGS}
-DSZIP_ENABLE_ENCODING:BOOL=${encoding}
- )
+ )
elseif (${compress_type} MATCHES "GIT")
EXTERNALPROJECT_ADD (SZIP
GIT_REPOSITORY ${SZIP_URL}
- # [SVN_REVISION rev]
+ # [SVN_REVISION rev]
INSTALL_COMMAND ""
CMAKE_ARGS
-DBUILD_SHARED_LIBS:BOOL=${BUILD_SHARED_LIBS}
- -DHDF_PACKAGE_EXT:STRING=${HDF_PACKAGE_EXT}
+ -DSZIP_PACKAGE_EXT:STRING=${HDF_PACKAGE_EXT}
-DSZIP_EXTERNALLY_CONFIGURED:BOOL=OFF
-DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE}
-DCMAKE_INSTALL_PREFIX:PATH=${CMAKE_INSTALL_PREFIX}
@@ -115,7 +122,7 @@ MACRO (EXTERNAL_SZIP_LIBRARY compress_type libtype encoding)
-DCMAKE_ARCHIVE_OUTPUT_DIRECTORY:PATH=${CMAKE_ARCHIVE_OUTPUT_DIRECTORY}
-DCMAKE_ANSI_CFLAGS:STRING=${CMAKE_ANSI_CFLAGS}
-DSZIP_ENABLE_ENCODING:BOOL=${encoding}
- )
+ )
elseif (${compress_type} MATCHES "TGZ")
EXTERNALPROJECT_ADD (SZIP
URL ${SZIP_URL}
@@ -123,7 +130,7 @@ MACRO (EXTERNAL_SZIP_LIBRARY compress_type libtype encoding)
INSTALL_COMMAND ""
CMAKE_ARGS
-DBUILD_SHARED_LIBS:BOOL=${BUILD_SHARED_LIBS}
- -DHDF_PACKAGE_EXT:STRING=${HDF_PACKAGE_EXT}
+ -DSZIP_PACKAGE_EXT:STRING=${HDF_PACKAGE_EXT}
-DSZIP_EXTERNALLY_CONFIGURED:BOOL=OFF
-DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE}
-DCMAKE_INSTALL_PREFIX:PATH=${CMAKE_INSTALL_PREFIX}
@@ -132,27 +139,34 @@ MACRO (EXTERNAL_SZIP_LIBRARY compress_type libtype encoding)
-DCMAKE_ARCHIVE_OUTPUT_DIRECTORY:PATH=${CMAKE_ARCHIVE_OUTPUT_DIRECTORY}
-DCMAKE_ANSI_CFLAGS:STRING=${CMAKE_ANSI_CFLAGS}
-DSZIP_ENABLE_ENCODING:BOOL=${encoding}
- )
+ )
endif (${compress_type} MATCHES "SVN")
- externalproject_get_property (SZIP BINARY_DIR SOURCE_DIR)
+ externalproject_get_property (SZIP BINARY_DIR SOURCE_DIR)
- # Create imported target szip
- add_library (szip ${libtype} IMPORTED)
- HDF_IMPORT_SET_LIB_OPTIONS (szip "szip" ${libtype} "")
- add_dependencies (SZIP szip)
-
-# include (${BINARY_DIR}/SZIP-targets.cmake)
- set (SZIP_LIBRARY "szip")
+##include (${BINARY_DIR}/${SZIP_PACKAGE_NAME}${HDF_PACKAGE_EXT}-targets.cmake)
+# Create imported target szip-static
+ add_library(szip-static STATIC IMPORTED)
+ HDF_IMPORT_SET_LIB_OPTIONS (szip-static "szip" STATIC "")
+ add_dependencies (SZIP szip-static)
+ set (SZIP_STATIC_LIBRARY "szip-static")
+ set (SZIP_LIBRARIES ${SZIP_STATIC_LIBRARY})
+ if (BUILD_SHARED_LIBS)
+ # Create imported target szip-shared
+ add_library(szip-shared SHARED IMPORTED)
+ HDF_IMPORT_SET_LIB_OPTIONS (szip-shared "szip" SHARED "")
+ add_dependencies (SZIP szip-shared)
+ set (SZIP_SHARED_LIBRARY "szip-shared")
+ set (SZIP_LIBRARIES ${SZIP_LIBRARIES} ${SZIP_SHARED_LIBRARY})
+ endif (BUILD_SHARED_LIBS)
set (SZIP_INCLUDE_DIR_GEN "${BINARY_DIR}")
set (SZIP_INCLUDE_DIR "${SOURCE_DIR}/src")
set (SZIP_FOUND 1)
- set (SZIP_LIBRARIES ${SZIP_LIBRARY})
set (SZIP_INCLUDE_DIRS ${SZIP_INCLUDE_DIR_GEN} ${SZIP_INCLUDE_DIR})
-ENDMACRO (EXTERNAL_SZIP_LIBRARY)
+endmacro (EXTERNAL_SZIP_LIBRARY)
#-------------------------------------------------------------------------------
-MACRO (PACKAGE_SZIP_LIBRARY compress_type)
+macro (PACKAGE_SZIP_LIBRARY compress_type)
add_custom_target (SZIP-GenHeader-Copy ALL
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${SZIP_INCLUDE_DIR_GEN}/SZconfig.h ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/
COMMENT "Copying ${SZIP_INCLUDE_DIR_GEN}/SZconfig.h to ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/"
@@ -161,18 +175,18 @@ MACRO (PACKAGE_SZIP_LIBRARY compress_type)
if (${compress_type} MATCHES "SVN" OR ${compress_type} MATCHES "TGZ")
add_dependencies (SZIP-GenHeader-Copy SZIP)
endif (${compress_type} MATCHES "SVN" OR ${compress_type} MATCHES "TGZ")
-ENDMACRO (PACKAGE_SZIP_LIBRARY)
+endmacro (PACKAGE_SZIP_LIBRARY)
#-------------------------------------------------------------------------------
-MACRO (EXTERNAL_ZLIB_LIBRARY compress_type libtype)
+macro (EXTERNAL_ZLIB_LIBRARY compress_type)
if (${compress_type} MATCHES "SVN")
EXTERNALPROJECT_ADD (ZLIB
SVN_REPOSITORY ${ZLIB_URL}
- # [SVN_REVISION rev]
+ # [SVN_REVISION rev]
INSTALL_COMMAND ""
CMAKE_ARGS
-DBUILD_SHARED_LIBS:BOOL=${BUILD_SHARED_LIBS}
- -DHDF_PACKAGE_EXT:STRING=${HDF_PACKAGE_EXT}
+ -DZLIB_PACKAGE_EXT:STRING=${HDF_PACKAGE_EXT}
-DZLIB_EXTERNALLY_CONFIGURED:BOOL=OFF
-DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE}
-DCMAKE_INSTALL_PREFIX:PATH=${CMAKE_INSTALL_PREFIX}
@@ -180,15 +194,15 @@ MACRO (EXTERNAL_ZLIB_LIBRARY compress_type libtype)
-DCMAKE_LIBRARY_OUTPUT_DIRECTORY:PATH=${CMAKE_LIBRARY_OUTPUT_DIRECTORY}
-DCMAKE_ARCHIVE_OUTPUT_DIRECTORY:PATH=${CMAKE_ARCHIVE_OUTPUT_DIRECTORY}
-DCMAKE_ANSI_CFLAGS:STRING=${CMAKE_ANSI_CFLAGS}
- )
+ )
elseif (${compress_type} MATCHES "GIT")
EXTERNALPROJECT_ADD (ZLIB
GIT_REPOSITORY ${ZLIB_URL}
- # [SVN_REVISION rev]
+ # [SVN_REVISION rev]
INSTALL_COMMAND ""
CMAKE_ARGS
-DBUILD_SHARED_LIBS:BOOL=${BUILD_SHARED_LIBS}
- -DHDF_PACKAGE_EXT:STRING=${HDF_PACKAGE_EXT}
+ -DZLIB_PACKAGE_EXT:STRING=${HDF_PACKAGE_EXT}
-DZLIB_EXTERNALLY_CONFIGURED:BOOL=OFF
-DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE}
-DCMAKE_INSTALL_PREFIX:PATH=${CMAKE_INSTALL_PREFIX}
@@ -196,7 +210,7 @@ MACRO (EXTERNAL_ZLIB_LIBRARY compress_type libtype)
-DCMAKE_LIBRARY_OUTPUT_DIRECTORY:PATH=${CMAKE_LIBRARY_OUTPUT_DIRECTORY}
-DCMAKE_ARCHIVE_OUTPUT_DIRECTORY:PATH=${CMAKE_ARCHIVE_OUTPUT_DIRECTORY}
-DCMAKE_ANSI_CFLAGS:STRING=${CMAKE_ANSI_CFLAGS}
- )
+ )
elseif (${compress_type} MATCHES "TGZ")
EXTERNALPROJECT_ADD (ZLIB
URL ${ZLIB_URL}
@@ -204,7 +218,7 @@ MACRO (EXTERNAL_ZLIB_LIBRARY compress_type libtype)
INSTALL_COMMAND ""
CMAKE_ARGS
-DBUILD_SHARED_LIBS:BOOL=${BUILD_SHARED_LIBS}
- -DHDF_PACKAGE_EXT:STRING=${HDF_PACKAGE_EXT}
+ -DZLIB_PACKAGE_EXT:STRING=${HDF_PACKAGE_EXT}
-DZLIB_EXTERNALLY_CONFIGURED:BOOL=OFF
-DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE}
-DCMAKE_INSTALL_PREFIX:PATH=${CMAKE_INSTALL_PREFIX}
@@ -212,32 +226,39 @@ MACRO (EXTERNAL_ZLIB_LIBRARY compress_type libtype)
-DCMAKE_LIBRARY_OUTPUT_DIRECTORY:PATH=${CMAKE_LIBRARY_OUTPUT_DIRECTORY}
-DCMAKE_ARCHIVE_OUTPUT_DIRECTORY:PATH=${CMAKE_ARCHIVE_OUTPUT_DIRECTORY}
-DCMAKE_ANSI_CFLAGS:STRING=${CMAKE_ANSI_CFLAGS}
- )
+ )
endif (${compress_type} MATCHES "SVN")
- externalproject_get_property (ZLIB BINARY_DIR SOURCE_DIR)
+ externalproject_get_property (ZLIB BINARY_DIR SOURCE_DIR)
- # Create imported target zlib
- add_library (zlib ${libtype} IMPORTED)
if (WIN32)
set (ZLIB_LIB_NAME "zlib")
else (WIN32)
set (ZLIB_LIB_NAME "z")
endif (WIN32)
- HDF_IMPORT_SET_LIB_OPTIONS (zlib ${ZLIB_LIB_NAME} ${libtype} "")
- add_dependencies (ZLIB zlib)
+##include (${BINARY_DIR}/${ZLIB_PACKAGE_NAME}${HDF_PACKAGE_EXT}-targets.cmake)
+# Create imported target zlib-static
+ add_library(zlib-static STATIC IMPORTED)
+ HDF_IMPORT_SET_LIB_OPTIONS (zlib-static ${ZLIB_LIB_NAME} STATIC "")
+ add_dependencies (ZLIB zlib-static)
+ set (ZLIB_STATIC_LIBRARY "zlib-static")
+ set (ZLIB_LIBRARIES ${ZLIB_STATIC_LIBRARY})
+ if (BUILD_SHARED_LIBS)
+ # Create imported target zlib-shared
+ add_library(zlib-shared SHARED IMPORTED)
+ HDF_IMPORT_SET_LIB_OPTIONS (zlib-shared ${ZLIB_LIB_NAME} SHARED "")
+ add_dependencies (ZLIB zlib-shared)
+ set (ZLIB_SHARED_LIBRARY "zlib-shared")
+ set (ZLIB_LIBRARIES ${ZLIB_LIBRARIES} ${ZLIB_SHARED_LIBRARY})
+ endif (BUILD_SHARED_LIBS)
-# include (${BINARY_DIR}/ZLIB-targets.cmake)
- set (ZLIB_LIBRARY "zlib")
-
set (ZLIB_INCLUDE_DIR_GEN "${BINARY_DIR}")
set (ZLIB_INCLUDE_DIR "${SOURCE_DIR}")
set (ZLIB_FOUND 1)
- set (ZLIB_LIBRARIES ${ZLIB_LIBRARY})
set (ZLIB_INCLUDE_DIRS ${ZLIB_INCLUDE_DIR_GEN} ${ZLIB_INCLUDE_DIR})
-ENDMACRO (EXTERNAL_ZLIB_LIBRARY)
+endmacro (EXTERNAL_ZLIB_LIBRARY)
#-------------------------------------------------------------------------------
-MACRO (PACKAGE_ZLIB_LIBRARY compress_type)
+macro (PACKAGE_ZLIB_LIBRARY compress_type)
add_custom_target (ZLIB-GenHeader-Copy ALL
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${ZLIB_INCLUDE_DIR_GEN}/zconf.h ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/
COMMENT "Copying ${ZLIB_INCLUDE_DIR_GEN}/zconf.h to ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/"
@@ -246,4 +267,4 @@ MACRO (PACKAGE_ZLIB_LIBRARY compress_type)
if (${compress_type} MATCHES "SVN" OR ${compress_type} MATCHES "TGZ")
add_dependencies (ZLIB-GenHeader-Copy ZLIB)
endif (${compress_type} MATCHES "SVN" OR ${compress_type} MATCHES "TGZ")
-ENDMACRO (PACKAGE_ZLIB_LIBRARY)
+endmacro (PACKAGE_ZLIB_LIBRARY)
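
Since the libtype parameter was dropped from these macros, a call site now passes only the retrieval method (and, for JPEG, the PIC flag); the macro itself creates the -static imported target and, when BUILD_SHARED_LIBS is on, the -shared one as well. A hedged call-site sketch, with a placeholder URL:

  set (ZLIB_URL "https://example.org/zlib-1.2.8.tar.gz")   # placeholder tarball location
  EXTERNAL_ZLIB_LIBRARY ("TGZ")                            # no libtype argument any more
  message (STATUS "zlib imported targets: ${ZLIB_LIBRARIES}")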
diff --git a/config/cmake_ext_mod/HDFMacros.cmake b/config/cmake_ext_mod/HDFMacros.cmake
index f6e9b91..b2c4997 100644
--- a/config/cmake_ext_mod/HDFMacros.cmake
+++ b/config/cmake_ext_mod/HDFMacros.cmake
@@ -1,10 +1,10 @@
#-------------------------------------------------------------------------------
-MACRO (SET_GLOBAL_VARIABLE name value)
+macro (SET_GLOBAL_VARIABLE name value)
set (${name} ${value} CACHE INTERNAL "Used to pass variables between directories" FORCE)
-ENDMACRO (SET_GLOBAL_VARIABLE)
+endmacro (SET_GLOBAL_VARIABLE)
#-------------------------------------------------------------------------------
-MACRO (IDE_GENERATED_PROPERTIES SOURCE_PATH HEADERS SOURCES)
+macro (IDE_GENERATED_PROPERTIES SOURCE_PATH HEADERS SOURCES)
#set(source_group_path "Source/AIM/${NAME}")
string (REPLACE "/" "\\\\" source_group_path ${SOURCE_PATH})
source_group (${source_group_path} FILES ${HEADERS} ${SOURCES})
@@ -14,13 +14,13 @@ MACRO (IDE_GENERATED_PROPERTIES SOURCE_PATH HEADERS SOURCES)
#set_property (SOURCE ${HEADERS}
# PROPERTY MACOSX_PACKAGE_LOCATION Headers/${NAME}
#)
-ENDMACRO (IDE_GENERATED_PROPERTIES)
+endmacro (IDE_GENERATED_PROPERTIES)
#-------------------------------------------------------------------------------
-MACRO (IDE_SOURCE_PROPERTIES SOURCE_PATH HEADERS SOURCES)
+macro (IDE_SOURCE_PROPERTIES SOURCE_PATH HEADERS SOURCES)
# install (FILES ${HEADERS}
# DESTINATION include/R3D/${NAME}
- # COMPONENT Headers
+ # COMPONENT Headers
# )
string (REPLACE "/" "\\\\" source_group_path ${SOURCE_PATH} )
@@ -31,19 +31,17 @@ MACRO (IDE_SOURCE_PROPERTIES SOURCE_PATH HEADERS SOURCES)
#set_property (SOURCE ${HEADERS}
# PROPERTY MACOSX_PACKAGE_LOCATION Headers/${NAME}
#)
-ENDMACRO (IDE_SOURCE_PROPERTIES)
+endmacro (IDE_SOURCE_PROPERTIES)
#-------------------------------------------------------------------------------
-MACRO (TARGET_NAMING libtarget libtype)
- if (WIN32)
- if (${libtype} MATCHES "SHARED")
- set_target_properties (${libtarget} PROPERTIES OUTPUT_NAME "${libtarget}dll")
- endif (${libtype} MATCHES "SHARED")
- endif (WIN32)
-ENDMACRO (TARGET_NAMING)
+macro (TARGET_NAMING libtarget libtype)
+ if (${libtype} MATCHES "SHARED")
+ set_target_properties (${libtarget} PROPERTIES OUTPUT_NAME "${libtarget}${ARGN}")
+ endif (${libtype} MATCHES "SHARED")
+endmacro (TARGET_NAMING)
#-------------------------------------------------------------------------------
-MACRO (INSTALL_TARGET_PDB libtarget targetdestination targetcomponent)
+macro (INSTALL_TARGET_PDB libtarget targetdestination targetcomponent)
if (WIN32 AND MSVC)
get_target_property (target_name ${libtarget} OUTPUT_NAME_RELWITHDEBINFO)
install (
@@ -55,10 +53,10 @@ MACRO (INSTALL_TARGET_PDB libtarget targetdestination targetcomponent)
COMPONENT ${targetcomponent}
)
endif (WIN32 AND MSVC)
-ENDMACRO (INSTALL_TARGET_PDB)
+endmacro (INSTALL_TARGET_PDB)
#-------------------------------------------------------------------------------
-MACRO (INSTALL_PROGRAM_PDB progtarget targetdestination targetcomponent)
+macro (INSTALL_PROGRAM_PDB progtarget targetdestination targetcomponent)
if (WIN32 AND MSVC)
get_target_property (target_name ${progtarget} OUTPUT_NAME_RELWITHDEBINFO)
get_target_property (target_prefix ${progtarget} PREFIX)
@@ -71,10 +69,10 @@ MACRO (INSTALL_PROGRAM_PDB progtarget targetdestination targetcomponent)
COMPONENT ${targetcomponent}
)
endif (WIN32 AND MSVC)
-ENDMACRO (INSTALL_PROGRAM_PDB)
+endmacro (INSTALL_PROGRAM_PDB)
#-------------------------------------------------------------------------------
-MACRO (HDF_SET_LIB_OPTIONS libtarget libname libtype)
+macro (HDF_SET_LIB_OPTIONS libtarget libname libtype)
if (${libtype} MATCHES "SHARED")
if (WIN32)
set (LIB_RELEASE_NAME "${libname}")
@@ -92,7 +90,7 @@ MACRO (HDF_SET_LIB_OPTIONS libtarget libname libtype)
set (LIB_DEBUG_NAME "${libname}_debug")
endif (WIN32)
endif (${libtype} MATCHES "SHARED")
-
+
set_target_properties (${libtarget}
PROPERTIES
OUTPUT_NAME_DEBUG ${LIB_DEBUG_NAME}
@@ -100,7 +98,7 @@ MACRO (HDF_SET_LIB_OPTIONS libtarget libname libtype)
OUTPUT_NAME_MINSIZEREL ${LIB_RELEASE_NAME}
OUTPUT_NAME_RELWITHDEBINFO ${LIB_RELEASE_NAME}
)
-
+
#----- Use MSVC Naming conventions for Shared Libraries
if (MINGW AND ${libtype} MATCHES "SHARED")
set_target_properties (${libtarget}
@@ -111,10 +109,10 @@ MACRO (HDF_SET_LIB_OPTIONS libtarget libname libtype)
)
endif (MINGW AND ${libtype} MATCHES "SHARED")
-ENDMACRO (HDF_SET_LIB_OPTIONS)
+endmacro (HDF_SET_LIB_OPTIONS)
#-------------------------------------------------------------------------------
-MACRO (HDF_IMPORT_SET_LIB_OPTIONS libtarget libname libtype libversion)
+macro (HDF_IMPORT_SET_LIB_OPTIONS libtarget libname libtype libversion)
HDF_SET_LIB_OPTIONS (${libtarget} ${libname} ${libtype})
if (${importtype} MATCHES "IMPORT")
@@ -167,83 +165,67 @@ MACRO (HDF_IMPORT_SET_LIB_OPTIONS libtarget libname libtype libversion)
endif (WIN32 AND NOT MINGW)
endif (${libtype} MATCHES "SHARED")
-ENDMACRO (HDF_IMPORT_SET_LIB_OPTIONS)
+endmacro (HDF_IMPORT_SET_LIB_OPTIONS)
#-------------------------------------------------------------------------------
-MACRO (TARGET_C_PROPERTIES wintarget addcompileflags addlinkflags)
+macro (TARGET_C_PROPERTIES wintarget libtype addcompileflags addlinkflags)
if (MSVC)
- TARGET_MSVC_PROPERTIES (${wintarget} "${addcompileflags} ${WIN_COMPILE_FLAGS}" "${addlinkflags} ${WIN_LINK_FLAGS}")
+ TARGET_MSVC_PROPERTIES (${wintarget} ${libtype} "${addcompileflags} ${WIN_COMPILE_FLAGS}" "${addlinkflags} ${WIN_LINK_FLAGS}")
else (MSVC)
- if (BUILD_SHARED_LIBS)
- set_target_properties (${wintarget}
- PROPERTIES
- COMPILE_FLAGS "${addcompileflags}"
- LINK_FLAGS "${addlinkflags}"
- )
- else (BUILD_SHARED_LIBS)
- set_target_properties (${wintarget}
- PROPERTIES
- COMPILE_FLAGS "${addcompileflags}"
- LINK_FLAGS "${addlinkflags}"
- )
- endif (BUILD_SHARED_LIBS)
+ set_target_properties (${wintarget}
+ PROPERTIES
+ COMPILE_FLAGS "${addcompileflags}"
+ LINK_FLAGS "${addlinkflags}"
+ )
endif (MSVC)
-ENDMACRO (TARGET_C_PROPERTIES)
+endmacro (TARGET_C_PROPERTIES)
#-------------------------------------------------------------------------------
-MACRO (TARGET_MSVC_PROPERTIES wintarget addcompileflags addlinkflags)
+macro (TARGET_MSVC_PROPERTIES wintarget libtype addcompileflags addlinkflags)
if (MSVC)
- if (BUILD_SHARED_LIBS)
- set_target_properties (${wintarget}
- PROPERTIES
- COMPILE_FLAGS "${addcompileflags}"
- LINK_FLAGS "${addlinkflags}"
- )
- else (BUILD_SHARED_LIBS)
- set_target_properties (${wintarget}
- PROPERTIES
- COMPILE_FLAGS "${addcompileflags}"
- LINK_FLAGS "${addlinkflags}"
- )
- endif (BUILD_SHARED_LIBS)
+ set_target_properties (${wintarget}
+ PROPERTIES
+ COMPILE_FLAGS "${addcompileflags}"
+ LINK_FLAGS "${addlinkflags}"
+ )
endif (MSVC)
-ENDMACRO (TARGET_MSVC_PROPERTIES)
+endmacro (TARGET_MSVC_PROPERTIES)
#-------------------------------------------------------------------------------
-MACRO (TARGET_FORTRAN_PROPERTIES forttarget addcompileflags addlinkflags)
+macro (TARGET_FORTRAN_PROPERTIES forttarget libtype addcompileflags addlinkflags)
if (WIN32)
- TARGET_FORTRAN_WIN_PROPERTIES (${forttarget} "${addcompileflags} ${WIN_COMPILE_FLAGS}" "${addlinkflags} ${WIN_LINK_FLAGS}")
+ TARGET_FORTRAN_WIN_PROPERTIES (${forttarget} ${libtype} "${addcompileflags} ${WIN_COMPILE_FLAGS}" "${addlinkflags} ${WIN_LINK_FLAGS}")
endif (WIN32)
-ENDMACRO (TARGET_FORTRAN_PROPERTIES)
+endmacro (TARGET_FORTRAN_PROPERTIES)
#-------------------------------------------------------------------------------
-MACRO (TARGET_FORTRAN_WIN_PROPERTIES forttarget addcompileflags addlinkflags)
+macro (TARGET_FORTRAN_WIN_PROPERTIES forttarget libtype addcompileflags addlinkflags)
if (MSVC)
- if (BUILD_SHARED_LIBS)
+ if (${libtype} MATCHES "SHARED")
set_target_properties (${forttarget}
PROPERTIES
COMPILE_FLAGS "/dll ${addcompileflags}"
LINK_FLAGS "/SUBSYSTEM:CONSOLE ${addlinkflags}"
- )
- else (BUILD_SHARED_LIBS)
+ )
+ else (${libtype} MATCHES "SHARED")
set_target_properties (${forttarget}
PROPERTIES
COMPILE_FLAGS "${addcompileflags}"
LINK_FLAGS "/SUBSYSTEM:CONSOLE ${addlinkflags}"
- )
- endif (BUILD_SHARED_LIBS)
+ )
+ endif (${libtype} MATCHES "SHARED")
endif (MSVC)
-ENDMACRO (TARGET_FORTRAN_WIN_PROPERTIES)
+endmacro (TARGET_FORTRAN_WIN_PROPERTIES)
#-----------------------------------------------------------------------------
# Configure the README.txt file for the binary package
#-----------------------------------------------------------------------------
-MACRO (HDF_README_PROPERTIES target_fortran)
+macro (HDF_README_PROPERTIES target_fortran)
set (BINARY_SYSTEM_NAME ${CMAKE_SYSTEM_NAME})
set (BINARY_PLATFORM "${CMAKE_SYSTEM_NAME}")
if (WIN32)
set (BINARY_EXAMPLE_ENDING "zip")
- set (BINARY_INSTALL_ENDING "exe")
+ set (BINARY_INSTALL_ENDING "msi")
if (CMAKE_CL_64)
set (BINARY_SYSTEM_NAME "win64")
else (CMAKE_CL_64)
@@ -253,6 +235,8 @@ MACRO (HDF_README_PROPERTIES target_fortran)
set (BINARY_PLATFORM "${BINARY_PLATFORM} 7")
elseif (${CMAKE_SYSTEM_VERSION} MATCHES "6.2")
set (BINARY_PLATFORM "${BINARY_PLATFORM} 8")
+ elseif (${CMAKE_SYSTEM_VERSION} MATCHES "6.3")
+ set (BINARY_PLATFORM "${BINARY_PLATFORM} 10")
endif (${CMAKE_SYSTEM_VERSION} MATCHES "6.1")
set (BINARY_PLATFORM "${BINARY_PLATFORM} ${MSVC_C_ARCHITECTURE_ID}")
if (${CMAKE_C_COMPILER_VERSION} MATCHES "16.*")
@@ -263,6 +247,8 @@ MACRO (HDF_README_PROPERTIES target_fortran)
set (BINARY_PLATFORM "${BINARY_PLATFORM}, using VISUAL STUDIO 2012")
elseif (${CMAKE_C_COMPILER_VERSION} MATCHES "18.*")
set (BINARY_PLATFORM "${BINARY_PLATFORM}, using VISUAL STUDIO 2013")
+ elseif (${CMAKE_C_COMPILER_VERSION} MATCHES "19.*")
+ set (BINARY_PLATFORM "${BINARY_PLATFORM}, using VISUAL STUDIO 2015")
else (${CMAKE_C_COMPILER_VERSION} MATCHES "16.*")
set (BINARY_PLATFORM "${BINARY_PLATFORM}, using VISUAL STUDIO ${CMAKE_C_COMPILER_VERSION}")
endif (${CMAKE_C_COMPILER_VERSION} MATCHES "16.*")
@@ -277,12 +263,19 @@ MACRO (HDF_README_PROPERTIES target_fortran)
set (BINARY_PLATFORM "${BINARY_PLATFORM} ${CMAKE_SYSTEM_VERSION} ${CMAKE_SYSTEM_PROCESSOR}")
set (BINARY_PLATFORM "${BINARY_PLATFORM}, using ${CMAKE_C_COMPILER_ID} C ${CMAKE_C_COMPILER_VERSION}")
endif (WIN32)
+
if (target_fortran)
set (BINARY_PLATFORM "${BINARY_PLATFORM} / ${CMAKE_Fortran_COMPILER_ID} Fortran")
endif (target_fortran)
-
+
+ if (BUILD_SHARED_LIBS)
+ set (LIB_TYPE "Static and Shared")
+ else (BUILD_SHARED_LIBS)
+ set (LIB_TYPE "Static")
+ endif (BUILD_SHARED_LIBS)
+
configure_file (
- ${HDF_RESOURCES_DIR}/README.txt.cmake.in
+ ${HDF_RESOURCES_DIR}/README.txt.cmake.in
${CMAKE_BINARY_DIR}/README.txt @ONLY
)
-ENDMACRO (HDF_README_PROPERTIES)
+endmacro (HDF_README_PROPERTIES)
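[Note: the reworked macros above take the library type as an explicit argument instead of branching on BUILD_SHARED_LIBS, and TARGET_NAMING now appends an optional suffix passed through ARGN rather than a hard-coded "dll". A minimal sketch of a caller, assuming the macros file above (config/cmake_ext_mod/HDFMacros.cmake) has been included; "demo" and the flag strings are placeholders, not actual HDF4 targets:

    cmake_minimum_required (VERSION 3.1)
    project (macro_demo C)
    include ("${CMAKE_CURRENT_LIST_DIR}/config/cmake_ext_mod/HDFMacros.cmake")

    add_library (demo SHARED demo.c)

    # The library type is now passed explicitly instead of being inferred
    # from BUILD_SHARED_LIBS.
    TARGET_C_PROPERTIES (demo SHARED " " " ")

    # The optional suffix goes through ARGN; passing "dll" reproduces the old
    # Windows naming, passing nothing leaves the output name unchanged.
    TARGET_NAMING (demo SHARED "dll")

    # Per-configuration output names (release/debug) are still set centrally.
    HDF_SET_LIB_OPTIONS (demo demo SHARED)
]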
diff --git a/config/cmake_ext_mod/HDFTests.c b/config/cmake_ext_mod/HDFTests.c
index 03aab43..8478d1b 100644
--- a/config/cmake_ext_mod/HDFTests.c
+++ b/config/cmake_ext_mod/HDFTests.c
@@ -213,7 +213,7 @@ SIMPLE_TEST(struct stat sb; sb.st_blocks=0);
int main(void)
{
- char *llwidthArgs[] = { "l64", "l", "L", "q", "ll", NULL };
+ char *llwidthArgs[] = { "I64", "l64", "l", "L", "q", "ll", NULL };
char *s = malloc(128);
char **currentArg = NULL;
LL_TYPE x = (LL_TYPE)1048576 * (LL_TYPE)1048576;
@@ -323,6 +323,21 @@ int main(void)
SIMPLE_TEST(posix_memalign());
#endif
+#ifdef HAVE_DEFAULT_SOURCE
+/* check default source */
+#include <features.h>
+
+int
+main(void)
+{
+#ifdef __GLIBC_PREREQ
+ return __GLIBC_PREREQ(2,19);
+#else
+ return 0;
+#endif /* defined(__GLIBC_PREREQ) */
+}
+#endif
+
#ifdef TEST_LFS_WORKS
/* Return 0 when LFS is available and 1 otherwise. */
#define _LARGEFILE_SOURCE
@@ -374,13 +389,6 @@ int main(void)
}
#endif
-#ifdef LONE_COLON
-int main(int argc, char * argv)
-{
- return 0;
-}
-#endif
-
#ifdef CXX_HAVE_OFFSETOF
#include <stdio.h>
@@ -446,13 +454,19 @@ SIMPLE_TEST(struct text_info w; w.screenwidth=0);
#endif /* HAVE_TM_GMTOFF */
-
-#if defined( INLINE_TEST_inline ) || defined( INLINE_TEST___inline__ ) || defined( INLINE_TEST___inline )
+#if defined( HAVE_INLINE ) || defined( HAVE___INLINE__ ) || defined( HAVE___INLINE )
#ifndef __cplusplus
+#if defined( HAVE_INLINE )
+# define INLINE_KW inline
+#elif defined ( HAVE___INLINE__ )
+# define INLINE_KW __inline__
+#elif defined ( HAVE___INLINE )
+# define INLINE_KW __inline
+#endif /* HAVE_INLINE */
typedef int foo_t;
-static INLINE_TEST_INLINE foo_t static_foo () { return 0; }
-INLINE_TEST_INLINE foo_t foo () {return 0; }
-int main() { return 0; }
-#endif
+static INLINE_KW foo_t static_foo () { return 0; }
+INLINE_KW foo_t foo () {return 0; }
+int main(void) { return 0; }
+#endif /* __cplusplus */
+#endif /* defined( HAVE_INLINE ) || defined( HAVE___INLINE__ ) || defined( HAVE___INLINE ) */
-#endif /* INLINE_TEST */
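[Note: each probe in HDFTests.c is wrapped in its own #ifdef, so defining one guard symbol selects one small test program; the new HAVE_DEFAULT_SOURCE block, for instance, only reports whether glibc is at least 2.19. A sketch of driving a single probe directly with try_compile, purely for illustration; the HDF4 build exercises this file through its own check macros, so the path and result-variable names below are assumptions:

    cmake_minimum_required (VERSION 3.1)
    project (probe_demo C)

    set (HDF_TESTS_SRC "${CMAKE_CURRENT_LIST_DIR}/config/cmake_ext_mod/HDFTests.c")

    # Defining the guard symbol turns exactly one of the #ifdef'd programs on.
    try_compile (DEFAULT_SOURCE_COMPILES
        ${CMAKE_BINARY_DIR}
        ${HDF_TESTS_SRC}
        COMPILE_DEFINITIONS "-DHAVE_DEFAULT_SOURCE"
        OUTPUT_VARIABLE PROBE_OUTPUT
    )
    message (STATUS "HAVE_DEFAULT_SOURCE probe compiled: ${DEFAULT_SOURCE_COMPILES}")
]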
diff --git a/config/cmake_ext_mod/HDFUseFortran.cmake b/config/cmake_ext_mod/HDFUseFortran.cmake
index 4955d20..275f2ea 100644
--- a/config/cmake_ext_mod/HDFUseFortran.cmake
+++ b/config/cmake_ext_mod/HDFUseFortran.cmake
@@ -3,7 +3,7 @@
#
#-------------------------------------------------------------------------------
ENABLE_LANGUAGE (Fortran)
-
+
#-----------------------------------------------------------------------------
# Detect name mangling convention used between Fortran and C
#-----------------------------------------------------------------------------
@@ -12,7 +12,6 @@ FortranCInterface_HEADER (
${CMAKE_BINARY_DIR}/FCMangle.h
MACRO_NAMESPACE "H5_FC_"
SYMBOL_NAMESPACE "H5_FC_"
- SYMBOLS mysub mymod:my_sub
)
file (STRINGS ${CMAKE_BINARY_DIR}/FCMangle.h CONTENTS REGEX "H5_FC_GLOBAL\\(.*,.*\\) +(.*)")
@@ -28,7 +27,6 @@ set (H5_FC_FUNC_ "H5_FC_FUNC_(name,NAME) ${CMAKE_MATCH_1}")
# so this one is used for a sizeof test.
#-----------------------------------------------------------------------------
MACRO (CHECK_FORTRAN_FEATURE FUNCTION CODE VARIABLE)
- if (NOT DEFINED ${VARIABLE})
message (STATUS "Testing Fortran ${FUNCTION}")
if (CMAKE_REQUIRED_LIBRARIES)
set (CHECK_FUNCTION_EXISTS_ADD_LIBRARIES
@@ -37,12 +35,12 @@ MACRO (CHECK_FORTRAN_FEATURE FUNCTION CODE VARIABLE)
set (CHECK_FUNCTION_EXISTS_ADD_LIBRARIES)
endif (CMAKE_REQUIRED_LIBRARIES)
file (WRITE
- ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeTmp/testFortranCompiler.f
+ ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeTmp/testFortranCompiler.f90
"${CODE}"
)
- TRY_COMPILE (${VARIABLE}
+ TRY_COMPILE (RESULT_VAR
${CMAKE_BINARY_DIR}
- ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeTmp/testFortranCompiler.f
+ ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeTmp/testFortranCompiler.f90
CMAKE_FLAGS "${CHECK_FUNCTION_EXISTS_ADD_LIBRARIES}"
OUTPUT_VARIABLE OUTPUT
)
@@ -51,21 +49,21 @@ MACRO (CHECK_FORTRAN_FEATURE FUNCTION CODE VARIABLE)
# message ( "Test result ${OUTPUT}")
# message ( "* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * ")
- if (${VARIABLE})
+ if (${RESULT_VAR})
set (${VARIABLE} 1 CACHE INTERNAL "Have Fortran function ${FUNCTION}")
message (STATUS "Testing Fortran ${FUNCTION} - OK")
file (APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeOutput.log
"Determining if the Fortran ${FUNCTION} exists passed with the following output:\n"
"${OUTPUT}\n\n"
)
- else (${VARIABLE})
+ else ()
message (STATUS "Testing Fortran ${FUNCTION} - Fail")
- set (${VARIABLE} "" CACHE INTERNAL "Have Fortran function ${FUNCTION}")
+ set (${VARIABLE} 0 CACHE INTERNAL "Have Fortran function ${FUNCTION}")
file (APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeError.log
"Determining if the Fortran ${FUNCTION} exists failed with the following output:\n"
"${OUTPUT}\n\n")
- endif (${VARIABLE})
- endif (NOT DEFINED ${VARIABLE})
+ endif ()
+
ENDMACRO (CHECK_FORTRAN_FEATURE)
#-----------------------------------------------------------------------------
@@ -77,6 +75,7 @@ ENDMACRO (CHECK_FORTRAN_FEATURE)
#-----------------------------------------------------------------------------
# Check for Non-standard extension intrinsic function SIZEOF
+set(FORTRAN_HAVE_SIZEOF FALSE)
CHECK_FORTRAN_FEATURE(sizeof
"
PROGRAM main
@@ -87,6 +86,7 @@ CHECK_FORTRAN_FEATURE(sizeof
)
# Check for F2008 standard intrinsic function C_SIZEOF
+set(FORTRAN_HAVE_C_SIZEOF FALSE)
CHECK_FORTRAN_FEATURE(c_sizeof
"
PROGRAM main
@@ -111,7 +111,18 @@ CHECK_FORTRAN_FEATURE(storage_size
FORTRAN_HAVE_STORAGE_SIZE
)
+# Check for F2008 standard intrinsic module "ISO_FORTRAN_ENV"
+set(HAVE_ISO_FORTRAN_ENV FALSE)
+CHECK_FORTRAN_FEATURE(ISO_FORTRAN_ENV
+ "
+ PROGRAM main
+ USE, INTRINSIC :: ISO_FORTRAN_ENV
+ END PROGRAM
+ "
+ HAVE_ISO_FORTRAN_ENV
+)
+set(FORTRAN_DEFAULT_REAL_NOT_DOUBLE FALSE)
CHECK_FORTRAN_FEATURE(RealIsNotDouble
"
MODULE type_mod
@@ -141,6 +152,7 @@ CHECK_FORTRAN_FEATURE(RealIsNotDouble
#-----------------------------------------------------------------------------
# Checks if the ISO_C_BINDING module meets all the requirements
#-----------------------------------------------------------------------------
+set(FORTRAN_HAVE_ISO_C_BINDING FALSE)
CHECK_FORTRAN_FEATURE(iso_c_binding
"
PROGRAM main
@@ -148,7 +160,7 @@ CHECK_FORTRAN_FEATURE(iso_c_binding
IMPLICIT NONE
TYPE(C_PTR) :: ptr
TYPE(C_FUNPTR) :: funptr
- INTEGER(C_INT64_T) :: c_int64_type
+ INTEGER(C_INT64_T) :: c_int64_type
CHARACTER(LEN=80, KIND=c_char), TARGET :: ichr
ptr = C_LOC(ichr(1:1))
END PROGRAM
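[Note: after the rework above, CHECK_FORTRAN_FEATURE always re-runs (the NOT DEFINED guard is gone), compiles a free-form .f90 source, and caches the result as 1 or 0, which is why each check is now preceded by set(... FALSE). A sketch of adding another probe in the same style; the F2008 EXECUTE_COMMAND_LINE intrinsic is used only as an illustrative example and is not part of the HDF4 checks:

    # Pre-set the flag, matching the pattern used for the checks above.
    set (FORTRAN_HAVE_EXECUTE_COMMAND_LINE FALSE)
    CHECK_FORTRAN_FEATURE (execute_command_line
      "
           PROGRAM main
             CALL EXECUTE_COMMAND_LINE ('echo hello')
           END PROGRAM
      "
      FORTRAN_HAVE_EXECUTE_COMMAND_LINE
    )
]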
diff --git a/config/cmake_ext_mod/grepTest.cmake b/config/cmake_ext_mod/grepTest.cmake
index 579d855..a090057 100644
--- a/config/cmake_ext_mod/grepTest.cmake
+++ b/config/cmake_ext_mod/grepTest.cmake
@@ -27,13 +27,13 @@ endif (NOT TEST_REFERENCE)
message (STATUS "COMMAND: ${TEST_PROGRAM} ${TEST_ARGS}")
# run the test program, capture the stdout/stderr and the result var
-EXECUTE_PROCESS (
+execute_process (
COMMAND ${TEST_PROGRAM} ${TEST_ARGS}
WORKING_DIRECTORY ${TEST_FOLDER}
RESULT_VARIABLE TEST_RESULT
OUTPUT_FILE ${TEST_OUTPUT}
ERROR_FILE ${TEST_OUTPUT}.err
- OUTPUT_VARIABLE TEST_ERROR
+ OUTPUT_VARIABLE TEST_OUT
ERROR_VARIABLE TEST_ERROR
)
@@ -44,16 +44,16 @@ message (STATUS "COMMAND Error: ${TEST_ERROR}")
file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM)
# TEST_REFERENCE should always be matched
-STRING(REGEX MATCH "${TEST_REFERENCE}" TEST_MATCH ${TEST_STREAM})
-STRING(COMPARE EQUAL "${TEST_REFERENCE}" "${TEST_MATCH}" TEST_RESULT)
+string (REGEX MATCH "${TEST_REFERENCE}" TEST_MATCH ${TEST_STREAM})
+string (COMPARE EQUAL "${TEST_REFERENCE}" "${TEST_MATCH}" TEST_RESULT)
if (${TEST_RESULT} STREQUAL "0")
message (FATAL_ERROR "Failed: The output of ${TEST_PROGRAM} did not contain ${TEST_REFERENCE}")
endif (${TEST_RESULT} STREQUAL "0")
-STRING(REGEX MATCH "${TEST_FILTER}" TEST_MATCH ${TEST_STREAM})
+string (REGEX MATCH "${TEST_FILTER}" TEST_MATCH ${TEST_STREAM})
if (${TEST_EXPECT} STREQUAL "1")
# TEST_EXPECT (1) interperts TEST_FILTER as NOT to match
- STRING(LENGTH "${TEST_MATCH}" TEST_RESULT)
+ string (LENGTH "${TEST_MATCH}" TEST_RESULT)
if (NOT ${TEST_RESULT} STREQUAL "0")
message (FATAL_ERROR "Failed: The output of ${TEST_PROGRAM} did contain ${TEST_FILTER}")
endif (NOT ${TEST_RESULT} STREQUAL "0")
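[Note: grepTest.cmake is meant to be run as a CTest wrapper script: it executes TEST_PROGRAM, requires TEST_REFERENCE to appear in the captured output, and, depending on TEST_EXPECT, requires TEST_FILTER to be absent (1) or present (0). A sketch of wiring it up from a CMakeTests.cmake file; the target, strings, and resource-directory variable below are placeholders rather than an actual HDF4 test:

    add_test (
        NAME GREP_demo
        COMMAND ${CMAKE_COMMAND}
            -D "TEST_PROGRAM=$<TARGET_FILE:hdp>"
            -D "TEST_ARGS:STRING=list;somefile.hdf"
            -D "TEST_FOLDER=${PROJECT_BINARY_DIR}"
            -D "TEST_OUTPUT=GREP_demo.out"
            -D "TEST_REFERENCE=File library version"
            -D "TEST_FILTER=Error"
            -D "TEST_EXPECT=1"
            -P "${HDF_RESOURCES_EXT_DIR}/grepTest.cmake"
    )
]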
diff --git a/config/cmake_ext_mod/prunTest.cmake b/config/cmake_ext_mod/prunTest.cmake
index 3d5ec31..38ecb7e 100644
--- a/config/cmake_ext_mod/prunTest.cmake
+++ b/config/cmake_ext_mod/prunTest.cmake
@@ -30,7 +30,7 @@ set (ERROR_APPEND 1)
message (STATUS "COMMAND: ${TEST_PROGRAM} ${TEST_ARGS}")
if (TEST_ENV_VAR)
- set (ENV{${TEST_ENV_VAR}} "${TEST_ENV_VALUE}")
+ set (ENV{${TEST_ENV_VAR}} "${TEST_ENV_VALUE}")
endif (TEST_ENV_VAR)
# run the test program, capture the stdout/stderr and the result var
@@ -40,7 +40,7 @@ EXECUTE_PROCESS (
RESULT_VARIABLE TEST_RESULT
OUTPUT_FILE ${TEST_OUTPUT}
ERROR_FILE ${TEST_OUTPUT}.err
- OUTPUT_VARIABLE TEST_ERROR
+ OUTPUT_VARIABLE TEST_OUT
ERROR_VARIABLE TEST_ERROR
)
@@ -49,51 +49,51 @@ message (STATUS "COMMAND Result: ${TEST_RESULT}")
file (READ ${TEST_FOLDER}/${TEST_REFERENCE} TEST_STREAM)
file (WRITE ${TEST_FOLDER}/P_${TEST_REFERENCE} "${TEST_STREAM}")
-if (ERROR_APPEND)
+if (ERROR_APPEND AND EXISTS ${TEST_FOLDER}/${TEST_OUTPUT}.err)
file (READ ${TEST_FOLDER}/${TEST_OUTPUT}.err TEST_STREAM)
- file (APPEND ${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_STREAM}")
-endif (ERROR_APPEND)
+ file (APPEND ${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_STREAM}")
+endif (ERROR_APPEND AND EXISTS ${TEST_FOLDER}/${TEST_OUTPUT}.err)
if (TEST_APPEND)
- file (APPEND ${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_APPEND} ${TEST_ERROR}\n")
+ file (APPEND ${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_APPEND} ${TEST_ERROR}\n")
endif (TEST_APPEND)
message (STATUS "COMMAND Error: ${TEST_ERROR}")
if (TEST_MASK)
file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM)
- STRING(REGEX REPLACE "Storage:[^\n]+\n" "Storage: <details removed for portability>\n" TEST_STREAM "${TEST_STREAM}")
+ STRING(REGEX REPLACE "Storage:[^\n]+\n" "Storage: <details removed for portability>\n" TEST_STREAM "${TEST_STREAM}")
file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_STREAM}")
endif (TEST_MASK)
if (TEST_MASK_MOD)
file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM)
- STRING(REGEX REPLACE "Modified:[^\n]+\n" "Modified: XXXX-XX-XX XX:XX:XX XXX\n" TEST_STREAM "${TEST_STREAM}")
+ STRING(REGEX REPLACE "Modified:[^\n]+\n" "Modified: XXXX-XX-XX XX:XX:XX XXX\n" TEST_STREAM "${TEST_STREAM}")
file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_STREAM}")
endif (TEST_MASK_MOD)
if (TEST_MASK_ERROR)
file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM)
- STRING(REGEX REPLACE "thread [0-9]*:" "thread (IDs):" TEST_STREAM "${TEST_STREAM}")
- STRING(REGEX REPLACE ": ([^\n]*)[.]c " ": (file name) " TEST_STREAM "${TEST_STREAM}")
- STRING(REGEX REPLACE " line [0-9]*" " line (number)" TEST_STREAM "${TEST_STREAM}")
- STRING(REGEX REPLACE "v[1-9]*[.][0-9]*[.]" "version (number)." TEST_STREAM "${TEST_STREAM}")
- STRING(REGEX REPLACE "[1-9]*[.][0-9]*[.][0-9]*[^)]*" "version (number)" TEST_STREAM "${TEST_STREAM}")
- STRING(REGEX REPLACE "H5Eget_auto[1-2]*" "H5Eget_auto(1 or 2)" TEST_STREAM "${TEST_STREAM}")
- STRING(REGEX REPLACE "H5Eset_auto[1-2]*" "H5Eset_auto(1 or 2)" TEST_STREAM "${TEST_STREAM}")
+ STRING(REGEX REPLACE "thread [0-9]*:" "thread (IDs):" TEST_STREAM "${TEST_STREAM}")
+ STRING(REGEX REPLACE ": ([^\n]*)[.]c " ": (file name) " TEST_STREAM "${TEST_STREAM}")
+ STRING(REGEX REPLACE " line [0-9]*" " line (number)" TEST_STREAM "${TEST_STREAM}")
+ STRING(REGEX REPLACE "v[1-9]*[.][0-9]*[.]" "version (number)." TEST_STREAM "${TEST_STREAM}")
+ STRING(REGEX REPLACE "[1-9]*[.][0-9]*[.][0-9]*[^)]*" "version (number)" TEST_STREAM "${TEST_STREAM}")
+ STRING(REGEX REPLACE "H5Eget_auto[1-2]*" "H5Eget_auto(1 or 2)" TEST_STREAM "${TEST_STREAM}")
+ STRING(REGEX REPLACE "H5Eset_auto[1-2]*" "H5Eset_auto(1 or 2)" TEST_STREAM "${TEST_STREAM}")
file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_STREAM}")
endif (TEST_MASK_ERROR)
if (TEST_FILTER)
file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM)
- STRING(REGEX REPLACE "${TEST_FILTER}" "" TEST_STREAM "${TEST_STREAM}")
+ STRING(REGEX REPLACE "${TEST_FILTER}" "" TEST_STREAM "${TEST_STREAM}")
file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_STREAM}")
endif (TEST_FILTER)
#if (TEST_REF_FILTER)
# message (STATUS "TEST_REF_FILTER: ${TEST_APPEND}${TEST_REF_FILTER}")
# file (READ ${TEST_FOLDER}/P_${TEST_REFERENCE} TEST_STREAM)
-# STRING(REGEX REPLACE "${TEST_APPEND}" "${TEST_REF_FILTER}" TEST_STREAM "${TEST_STREAM}")
+# STRING(REGEX REPLACE "${TEST_APPEND}" "${TEST_REF_FILTER}" TEST_STREAM "${TEST_STREAM}")
# file (WRITE ${TEST_FOLDER}/P_${TEST_REFERENCE} "${TEST_STREAM}")
#endif (TEST_REF_FILTER)
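[Note: most of the churn in prunTest.cmake is whitespace, but the ERROR_APPEND branch now also checks that the .err file exists before reading it, and the TEST_MASK_* blocks keep normalizing run-specific details out of the captured output. The masking can be tried on its own; the sample string below is made up:

    set (sample "thread 0: error in: /path/dfr8.c line 123 of v4.2.12")
    # Each replace strips a detail that varies from run to run (thread id,
    # source file, line number, version) so the output can be diffed against
    # a stored reference.
    string (REGEX REPLACE "thread [0-9]*:" "thread (IDs):" sample "${sample}")
    string (REGEX REPLACE ": ([^\n]*)[.]c " ": (file name) " sample "${sample}")
    string (REGEX REPLACE " line [0-9]*" " line (number)" sample "${sample}")
    string (REGEX REPLACE "v[1-9]*[.][0-9]*[.]" "version (number)." sample "${sample}")
    message (STATUS "masked: ${sample}")
]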
diff --git a/config/cmake_ext_mod/runTest.cmake b/config/cmake_ext_mod/runTest.cmake
index bfaae2b..21a65e6 100644
--- a/config/cmake_ext_mod/runTest.cmake
+++ b/config/cmake_ext_mod/runTest.cmake
@@ -25,6 +25,7 @@ if (NOT TEST_SKIP_COMPARE AND NOT TEST_REFERENCE)
message (FATAL_ERROR "Require TEST_REFERENCE to be defined")
endif (NOT TEST_SKIP_COMPARE AND NOT TEST_REFERENCE)
+# if there is not an error reference file add the error output to the stdout file
if (NOT TEST_ERRREF)
set (ERROR_APPEND 1)
endif (NOT TEST_ERRREF)
@@ -32,43 +33,45 @@ endif (NOT TEST_ERRREF)
message (STATUS "COMMAND: ${TEST_PROGRAM} ${TEST_ARGS}")
if (TEST_ENV_VAR)
- set (ENV{${TEST_ENV_VAR}} "${TEST_ENV_VALUE}")
+ set (ENV{${TEST_ENV_VAR}} "${TEST_ENV_VALUE}")
endif (TEST_ENV_VAR)
if (NOT TEST_INPUT)
# run the test program, capture the stdout/stderr and the result var
- EXECUTE_PROCESS (
+ execute_process (
COMMAND ${TEST_PROGRAM} ${TEST_ARGS}
WORKING_DIRECTORY ${TEST_FOLDER}
RESULT_VARIABLE TEST_RESULT
OUTPUT_FILE ${TEST_OUTPUT}
ERROR_FILE ${TEST_OUTPUT}.err
- OUTPUT_VARIABLE TEST_ERROR
+ OUTPUT_VARIABLE TEST_OUT
ERROR_VARIABLE TEST_ERROR
)
else (NOT TEST_INPUT)
# run the test program with stdin, capture the stdout/stderr and the result var
- EXECUTE_PROCESS (
+ execute_process (
COMMAND ${TEST_PROGRAM} ${TEST_ARGS}
WORKING_DIRECTORY ${TEST_FOLDER}
RESULT_VARIABLE TEST_RESULT
INPUT_FILE ${TEST_INPUT}
OUTPUT_FILE ${TEST_OUTPUT}
ERROR_FILE ${TEST_OUTPUT}.err
- OUTPUT_VARIABLE TEST_ERROR
+ OUTPUT_VARIABLE TEST_OUT
ERROR_VARIABLE TEST_ERROR
)
endif (NOT TEST_INPUT)
message (STATUS "COMMAND Result: ${TEST_RESULT}")
-if (ERROR_APPEND)
+# if the .err file exists and ERRROR_APPEND is enabled
+if (ERROR_APPEND AND EXISTS ${TEST_FOLDER}/${TEST_OUTPUT}.err)
file (READ ${TEST_FOLDER}/${TEST_OUTPUT}.err TEST_STREAM)
- file (APPEND ${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_STREAM}")
-endif (ERROR_APPEND)
+ file (APPEND ${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_STREAM}")
+endif (ERROR_APPEND AND EXISTS ${TEST_FOLDER}/${TEST_OUTPUT}.err)
+# append the test result status with a predefined text
if (TEST_APPEND)
- file (APPEND ${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_APPEND} ${TEST_RESULT}\n")
+ file (APPEND ${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_APPEND} ${TEST_RESULT}\n")
endif (TEST_APPEND)
# if the return value is !=${TEST_EXPECT} bail out
@@ -78,44 +81,52 @@ endif (NOT ${TEST_RESULT} STREQUAL ${TEST_EXPECT})
message (STATUS "COMMAND Error: ${TEST_ERROR}")
+# if the output file needs Storage text removed
if (TEST_MASK)
file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM)
- STRING(REGEX REPLACE "Storage:[^\n]+\n" "Storage: <details removed for portability>\n" TEST_STREAM "${TEST_STREAM}")
+ string (REGEX REPLACE "Storage:[^\n]+\n" "Storage: <details removed for portability>\n" TEST_STREAM "${TEST_STREAM}")
file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_STREAM}")
endif (TEST_MASK)
+# if the output file needs Modified text removed
if (TEST_MASK_MOD)
file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM)
- STRING(REGEX REPLACE "Modified:[^\n]+\n" "Modified: XXXX-XX-XX XX:XX:XX XXX\n" TEST_STREAM "${TEST_STREAM}")
+ string (REGEX REPLACE "Modified:[^\n]+\n" "Modified: XXXX-XX-XX XX:XX:XX XXX\n" TEST_STREAM "${TEST_STREAM}")
file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_STREAM}")
endif (TEST_MASK_MOD)
+# if the output file or the .err file needs to mask out error stack info
if (TEST_MASK_ERROR)
if (NOT TEST_ERRREF)
+ # the error stack has been appended to the output file
file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM)
- else (NOT TEST_ERRREF)
+ else ()
+ # the error stack remains in the .err file
file (READ ${TEST_FOLDER}/${TEST_OUTPUT}.err TEST_STREAM)
- endif (NOT TEST_ERRREF)
- STRING(REGEX REPLACE "thread [0-9]*:" "thread (IDs):" TEST_STREAM "${TEST_STREAM}")
- STRING(REGEX REPLACE ": ([^\n]*)[.]c " ": (file name) " TEST_STREAM "${TEST_STREAM}")
- STRING(REGEX REPLACE " line [0-9]*" " line (number)" TEST_STREAM "${TEST_STREAM}")
- STRING(REGEX REPLACE "v[1-9]*[.][0-9]*[.]" "version (number)." TEST_STREAM "${TEST_STREAM}")
- STRING(REGEX REPLACE "[1-9]*[.][0-9]*[.][0-9]*[^)]*" "version (number)" TEST_STREAM "${TEST_STREAM}")
- STRING(REGEX REPLACE "H5Eget_auto[1-2]*" "H5Eget_auto(1 or 2)" TEST_STREAM "${TEST_STREAM}")
- STRING(REGEX REPLACE "H5Eset_auto[1-2]*" "H5Eset_auto(1 or 2)" TEST_STREAM "${TEST_STREAM}")
+ endif ()
+ string (REGEX REPLACE "thread [0-9]*:" "thread (IDs):" TEST_STREAM "${TEST_STREAM}")
+ string (REGEX REPLACE ": ([^\n]*)[.]c " ": (file name) " TEST_STREAM "${TEST_STREAM}")
+ string (REGEX REPLACE " line [0-9]*" " line (number)" TEST_STREAM "${TEST_STREAM}")
+ string (REGEX REPLACE "v[1-9]*[.][0-9]*[.]" "version (number)." TEST_STREAM "${TEST_STREAM}")
+ string (REGEX REPLACE "[1-9]*[.][0-9]*[.][0-9]*[^)]*" "version (number)" TEST_STREAM "${TEST_STREAM}")
+ string (REGEX REPLACE "H5Eget_auto[1-2]*" "H5Eget_auto(1 or 2)" TEST_STREAM "${TEST_STREAM}")
+ string (REGEX REPLACE "H5Eset_auto[1-2]*" "H5Eset_auto(1 or 2)" TEST_STREAM "${TEST_STREAM}")
+ # write back the changes to the original files
if (NOT TEST_ERRREF)
file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_STREAM}")
- else (NOT TEST_ERRREF)
+ else ()
file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT}.err "${TEST_STREAM}")
- endif (NOT TEST_ERRREF)
+ endif ()
endif (TEST_MASK_ERROR)
+# remove text from the output file
if (TEST_FILTER)
file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM)
- STRING(REGEX REPLACE "${TEST_FILTER}" "" TEST_STREAM "${TEST_STREAM}")
+ string (REGEX REPLACE "${TEST_FILTER}" "" TEST_STREAM "${TEST_STREAM}")
file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_STREAM}")
endif (TEST_FILTER)
+# compare output files to references unless this must be skipped
if (NOT TEST_SKIP_COMPARE)
if (WIN32 AND NOT MINGW)
file (READ ${TEST_FOLDER}/${TEST_REFERENCE} TEST_STREAM)
@@ -123,21 +134,21 @@ if (NOT TEST_SKIP_COMPARE)
endif (WIN32 AND NOT MINGW)
# now compare the output with the reference
- EXECUTE_PROCESS (
+ execute_process (
COMMAND ${CMAKE_COMMAND} -E compare_files ${TEST_FOLDER}/${TEST_OUTPUT} ${TEST_FOLDER}/${TEST_REFERENCE}
RESULT_VARIABLE TEST_RESULT
)
if (NOT ${TEST_RESULT} STREQUAL 0)
set (TEST_RESULT 0)
file (STRINGS ${TEST_FOLDER}/${TEST_OUTPUT} test_act)
- LIST (LENGTH test_act len_act)
+ list (LENGTH test_act len_act)
file (STRINGS ${TEST_FOLDER}/${TEST_REFERENCE} test_ref)
- LIST (LENGTH test_ref len_ref)
+ list (LENGTH test_ref len_ref)
if (NOT ${len_act} STREQUAL "0")
- MATH (EXPR _FP_LEN "${len_ref} - 1")
+ math (EXPR _FP_LEN "${len_ref} - 1")
foreach (line RANGE 0 ${_FP_LEN})
- LIST (GET test_act ${line} str_act)
- LIST (GET test_ref ${line} str_ref)
+ list (GET test_act ${line} str_act)
+ list (GET test_ref ${line} str_ref)
if (NOT "${str_act}" STREQUAL "${str_ref}")
if (NOT "${str_act}" STREQUAL "")
set (TEST_RESULT 1)
@@ -157,7 +168,8 @@ if (NOT TEST_SKIP_COMPARE)
if (NOT ${TEST_RESULT} STREQUAL 0)
message (FATAL_ERROR "Failed: The output of ${TEST_OUTPUT} did not match ${TEST_REFERENCE}")
endif (NOT ${TEST_RESULT} STREQUAL 0)
-
+
+ # now compare the .err file with the error reference, if supplied
if (TEST_ERRREF)
if (WIN32 AND NOT MINGW)
file (READ ${TEST_FOLDER}/${TEST_ERRREF} TEST_STREAM)
@@ -165,22 +177,22 @@ if (NOT TEST_SKIP_COMPARE)
endif (WIN32 AND NOT MINGW)
# now compare the error output with the error reference
- EXECUTE_PROCESS (
+ execute_process (
COMMAND ${CMAKE_COMMAND} -E compare_files ${TEST_FOLDER}/${TEST_OUTPUT}.err ${TEST_FOLDER}/${TEST_ERRREF}
RESULT_VARIABLE TEST_RESULT
)
if (NOT ${TEST_RESULT} STREQUAL 0)
set (TEST_RESULT 0)
file (STRINGS ${TEST_FOLDER}/${TEST_OUTPUT}.err test_act)
- LIST (LENGTH test_act len_act)
+ list (LENGTH test_act len_act)
file (STRINGS ${TEST_FOLDER}/${TEST_ERRREF} test_ref)
- LIST (LENGTH test_ref len_ref)
- MATH (EXPR _FP_LEN "${len_ref} - 1")
+ list (LENGTH test_ref len_ref)
+ math (EXPR _FP_LEN "${len_ref} - 1")
if (NOT ${len_act} STREQUAL "0")
- MATH (EXPR _FP_LEN "${len_ref} - 1")
+ math (EXPR _FP_LEN "${len_ref} - 1")
foreach (line RANGE 0 ${_FP_LEN})
- LIST (GET test_act ${line} str_act)
- LIST (GET test_ref ${line} str_ref)
+ list (GET test_act ${line} str_act)
+ list (GET test_ref ${line} str_ref)
if (NOT "${str_act}" STREQUAL "${str_ref}")
if (NOT "${str_act}" STREQUAL "")
set (TEST_RESULT 1)
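[Note: runTest.cmake follows the same wrapper pattern: it runs TEST_PROGRAM, optionally masks and filters the captured output, then compares it line by line against TEST_REFERENCE and, when TEST_ERRREF is given, compares the .err stream separately instead of appending it to stdout. A sketch of a CTest entry using both references; the tool, file names, and resource-directory variable are placeholders, not an actual HDF4 test:

    add_test (
        NAME RUNTEST_demo
        COMMAND ${CMAKE_COMMAND}
            -D "TEST_PROGRAM=$<TARGET_FILE:hdfimport>"
            -D "TEST_ARGS:STRING=demo.txt;-o;demo.hdf"
            -D "TEST_FOLDER=${PROJECT_BINARY_DIR}/testfiles"
            -D "TEST_OUTPUT=RUNTEST_demo.out"
            -D "TEST_EXPECT=0"
            -D "TEST_REFERENCE=RUNTEST_demo.txt"
            -D "TEST_ERRREF=RUNTEST_demo.err"
            -D "TEST_MASK_ERROR=true"
            -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake"
    )
]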
diff --git a/configure b/configure
index f37d2fa..8954ec4 100755
--- a/configure
+++ b/configure
@@ -1,7 +1,7 @@
#! /bin/sh
-# From configure.ac Id: configure.ac 6194 2015-02-05 14:17:12Z bmribler .
+# From configure.ac Id: configure.ac 6473 2016-06-24 21:38:14Z bmribler .
# Guess values for system-dependent variables and create Makefiles.
-# Generated by GNU Autoconf 2.69 for HDF 4.2.11.
+# Generated by GNU Autoconf 2.69 for HDF 4.2.12.
#
# Report bugs to <help at hdfgroup.org>.
#
@@ -591,8 +591,8 @@ MAKEFLAGS=
# Identity of this package.
PACKAGE_NAME='HDF'
PACKAGE_TARNAME='hdf'
-PACKAGE_VERSION='4.2.11'
-PACKAGE_STRING='HDF 4.2.11'
+PACKAGE_VERSION='4.2.12'
+PACKAGE_STRING='HDF 4.2.12'
PACKAGE_BUGREPORT='help at hdfgroup.org'
PACKAGE_URL=''
@@ -690,6 +690,26 @@ LEX
YACC
AR
LN_S
+BUILD_JAVA_CONDITIONAL_FALSE
+BUILD_JAVA_CONDITIONAL_TRUE
+JAVA_PATH_NAME
+TESTS_JUNIT
+JAVA_JUNIT
+JUNIT
+_ACJNI_JAVAC
+JAVADOC
+JAR
+UUDECODE
+JAVAC
+JAVA
+JAVAFLAGS
+JAVACFLAGS
+JAVAPREFIX
+H4_CLASSPATH
+HDF_JAVA
+H4_JAVAFLAGS
+H4_JAVACFLAGS
+JNIFLAGS
BUILD_FORTRAN
HDF_BUILD_FORTRAN_FALSE
HDF_BUILD_FORTRAN_TRUE
@@ -809,6 +829,10 @@ enable_maintainer_mode
enable_netcdf
enable_dependency_tracking
enable_fortran
+enable_java
+with_java_prefix
+with_javac_flags
+with_java_flags
enable_shared
enable_static
with_pic
@@ -1380,7 +1404,7 @@ if test "$ac_init_help" = "long"; then
# Omit some internal or obsolete options to make the list less imposing.
# This message is too long to be a string in the A/UX 3.1 sh.
cat <<_ACEOF
-\`configure' configures HDF 4.2.11 to adapt to many kinds of systems.
+\`configure' configures HDF 4.2.12 to adapt to many kinds of systems.
Usage: $0 [OPTION]... [VAR=VALUE]...
@@ -1450,7 +1474,7 @@ fi
if test -n "$ac_init_help"; then
case $ac_init_help in
- short | recursive ) echo "Configuration of HDF 4.2.11:";;
+ short | recursive ) echo "Configuration of HDF 4.2.12:";;
esac
cat <<\_ACEOF
@@ -1470,6 +1494,7 @@ Optional Features:
--disable-dependency-tracking
speeds up one-time build
--enable-fortran Build Fortran into library [default=yes]
+ --enable-java Compile the Java JNI interface [default=no]
--enable-shared[=PKGS] build shared libraries [default=no]
--enable-static[=PKGS] build static libraries [default=yes]
--enable-fast-install[=PKGS]
@@ -1486,6 +1511,9 @@ Optional Features:
Optional Packages:
--with-PACKAGE[=ARG] use PACKAGE [ARG=yes]
--without-PACKAGE do not use PACKAGE (same as --with-PACKAGE=no)
+ --with-java-prefix=PFX prefix where Java runtime is installed (optional)
+ --with-javac-flags=FLAGS flags to pass to the Java compiler (optional)
+ --with-java-flags=FLAGS flags to pass to the Java VM (optional)
--with-pic[=PKGS] try to use only PIC/non-PIC objects [default=use
both]
--with-gnu-ld assume the C compiler uses GNU ld [default=no]
@@ -1585,7 +1613,7 @@ fi
test -n "$ac_init_help" && exit $ac_status
if $ac_init_version; then
cat <<\_ACEOF
-HDF configure 4.2.11
+HDF configure 4.2.12
generated by GNU Autoconf 2.69
Copyright (C) 2012 Free Software Foundation, Inc.
@@ -2398,7 +2426,7 @@ cat >config.log <<_ACEOF
This file contains any messages produced by compilers while
running configure, to aid debugging if configure makes a mistake.
-It was created by HDF $as_me 4.2.11, which was
+It was created by HDF $as_me 4.2.12, which was
generated by GNU Autoconf 2.69. Invocation command line was
$ $0 $@
@@ -3268,7 +3296,7 @@ fi
# Define the identity of the package.
PACKAGE='hdf'
- VERSION='4.2.11'
+ VERSION='4.2.12'
cat >>confdefs.h <<_ACEOF
@@ -6133,6 +6161,1091 @@ fi
+## ----------------------------------------------------------------------
+## Check if they would like the Java native interface (JNI) compiled
+##
+
+
+
+
+## This needs to be exposed for the library info file even if Java is disabled.
+
+
+## Default is no Java
+HDF_JAVA=no
+
+ H4_CLASSPATH=""
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if Java JNI interface enabled" >&5
+$as_echo_n "checking if Java JNI interface enabled... " >&6; }
+
+# Check whether --enable-java was given.
+if test "${enable_java+set}" = set; then :
+ enableval=$enable_java; HDF_JAVA=$enableval
+fi
+
+
+if test "X$HDF_JAVA" = "Xyes"; then
+ if test "X${enable_shared}" != "Xno"; then
+ echo "yes"
+ if test "X$CLASSPATH" = "X"; then
+ H4_CLASSPATH=".:$srcdir/java/lib"
+ else
+ H4_CLASSPATH=".:$CLASSPATH:$srcdir/java/lib"
+ fi
+ ## Checks for programs.
+
+
+# Check whether --with-java-prefix was given.
+if test "${with_java_prefix+set}" = set; then :
+ withval=$with_java_prefix;
+fi
+
+
+# Check whether --with-javac-flags was given.
+if test "${with_javac_flags+set}" = set; then :
+ withval=$with_javac_flags;
+fi
+
+
+# Check whether --with-java-flags was given.
+if test "${with_java_flags+set}" = set; then :
+ withval=$with_java_flags;
+fi
+
+JAVAPREFIX=$with_java_prefix
+JAVACFLAGS=$with_javac_flags
+JAVAFLAGS=$with_java_flags
+
+ H4_JAVACFLAGS=$JAVACFLAGS
+ H4_JAVAFLAGS=$JAVAFLAGS
+
+if test "x$JAVAPREFIX" = x; then :
+ test "x$JAVAC" = x && for ac_prog in "gcj -C" guavac jikes javac
+do
+ # Extract the first word of "$ac_prog", so it can be a program name with args.
+set dummy $ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_JAVAC+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ if test -n "$JAVAC"; then
+ ac_cv_prog_JAVAC="$JAVAC" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+ IFS=$as_save_IFS
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+ ac_cv_prog_JAVAC="$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+done
+ done
+IFS=$as_save_IFS
+
+fi
+fi
+JAVAC=$ac_cv_prog_JAVAC
+if test -n "$JAVAC"; then
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $JAVAC" >&5
+$as_echo "$JAVAC" >&6; }
+else
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+ test -n "$JAVAC" && break
+done
+
+else
+ test "x$JAVAC" = x && for ac_prog in "gcj -C" guavac jikes javac
+do
+ # Extract the first word of "$ac_prog", so it can be a program name with args.
+set dummy $ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_JAVAC+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ if test -n "$JAVAC"; then
+ ac_cv_prog_JAVAC="$JAVAC" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $JAVAPREFIX/bin
+do
+ IFS=$as_save_IFS
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+ ac_cv_prog_JAVAC="$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+done
+ done
+IFS=$as_save_IFS
+
+fi
+fi
+JAVAC=$ac_cv_prog_JAVAC
+if test -n "$JAVAC"; then
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $JAVAC" >&5
+$as_echo "$JAVAC" >&6; }
+else
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+ test -n "$JAVAC" && break
+done
+
+fi
+test "x$JAVAC" = x && as_fn_error $? "no acceptable Java compiler found in \$PATH" "$LINENO" 5
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $JAVAC works" >&5
+$as_echo_n "checking if $JAVAC works... " >&6; }
+if ${ac_cv_prog_javac_works+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+
+JAVA_TEST=Test.java
+CLASS_TEST=Test.class
+cat << \EOF > $JAVA_TEST
+/* #line 6320 "configure" */
+public class Test {
+}
+EOF
+if { ac_try='$JAVAC $JAVACFLAGS $JAVA_TEST'
+ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_try\""; } >&5
+ (eval $ac_try) 2>&5
+ ac_status=$?
+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+ test $ac_status = 0; }; } >/dev/null 2>&1; then
+ ac_cv_prog_javac_works=yes
+else
+ as_fn_error $? "The Java compiler $JAVAC failed (see config.log, check the CLASSPATH?)" "$LINENO" 5
+ echo "configure: failed program was:" >&5
+ cat $JAVA_TEST >&5
+fi
+rm -f $JAVA_TEST $CLASS_TEST
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_javac_works" >&5
+$as_echo "$ac_cv_prog_javac_works" >&6; }
+
+
+
+if test "x$JAVAPREFIX" = x; then :
+ test x$JAVA = x && for ac_prog in kaffe java
+do
+ # Extract the first word of "$ac_prog", so it can be a program name with args.
+set dummy $ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_JAVA+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ if test -n "$JAVA"; then
+ ac_cv_prog_JAVA="$JAVA" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+ IFS=$as_save_IFS
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+ ac_cv_prog_JAVA="$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+done
+ done
+IFS=$as_save_IFS
+
+fi
+fi
+JAVA=$ac_cv_prog_JAVA
+if test -n "$JAVA"; then
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $JAVA" >&5
+$as_echo "$JAVA" >&6; }
+else
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+ test -n "$JAVA" && break
+done
+
+else
+ test x$JAVA = x && for ac_prog in kaffe java
+do
+ # Extract the first word of "$ac_prog", so it can be a program name with args.
+set dummy $ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_JAVA+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ if test -n "$JAVA"; then
+ ac_cv_prog_JAVA="$JAVA" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $JAVAPREFIX/bin
+do
+ IFS=$as_save_IFS
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+ ac_cv_prog_JAVA="$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+done
+ done
+IFS=$as_save_IFS
+
+fi
+fi
+JAVA=$ac_cv_prog_JAVA
+if test -n "$JAVA"; then
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $JAVA" >&5
+$as_echo "$JAVA" >&6; }
+else
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+ test -n "$JAVA" && break
+done
+
+fi
+test x$JAVA = x && as_fn_error $? "no acceptable Java virtual machine found in \$PATH" "$LINENO" 5
+
+# Extract the first word of "uudecode", so it can be a program name with args.
+set dummy uudecode; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_path_UUDECODE+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ case $UUDECODE in
+ [\\/]* | ?:[\\/]*)
+ ac_cv_path_UUDECODE="$UUDECODE" # Let the user override the test with a path.
+ ;;
+ *)
+ as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+ IFS=$as_save_IFS
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+ ac_cv_path_UUDECODE="$as_dir/$ac_word$ac_exec_ext"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+done
+ done
+IFS=$as_save_IFS
+
+ test -z "$ac_cv_path_UUDECODE" && ac_cv_path_UUDECODE="no"
+ ;;
+esac
+fi
+UUDECODE=$ac_cv_path_UUDECODE
+if test -n "$UUDECODE"; then
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $UUDECODE" >&5
+$as_echo "$UUDECODE" >&6; }
+else
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+if test x$UUDECODE != xno; then
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if uudecode can decode base 64 file" >&5
+$as_echo_n "checking if uudecode can decode base 64 file... " >&6; }
+if ${ac_cv_prog_uudecode_base64+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+
+cat << \EOF > Test.uue
+begin-base64 644 Test.class
+yv66vgADAC0AFQcAAgEABFRlc3QHAAQBABBqYXZhL2xhbmcvT2JqZWN0AQAE
+bWFpbgEAFihbTGphdmEvbGFuZy9TdHJpbmc7KVYBAARDb2RlAQAPTGluZU51
+bWJlclRhYmxlDAAKAAsBAARleGl0AQAEKEkpVgoADQAJBwAOAQAQamF2YS9s
+YW5nL1N5c3RlbQEABjxpbml0PgEAAygpVgwADwAQCgADABEBAApTb3VyY2VG
+aWxlAQAJVGVzdC5qYXZhACEAAQADAAAAAAACAAkABQAGAAEABwAAACEAAQAB
+AAAABQO4AAyxAAAAAQAIAAAACgACAAAACgAEAAsAAQAPABAAAQAHAAAAIQAB
+AAEAAAAFKrcAErEAAAABAAgAAAAKAAIAAAAEAAQABAABABMAAAACABQ=
+====
+EOF
+if $UUDECODE Test.uue; then
+ ac_cv_prog_uudecode_base64=yes
+else
+ echo "configure: 6495: uudecode had trouble decoding base 64 file 'Test.uue'" >&5
+ echo "configure: failed file was:" >&5
+ cat Test.uue >&5
+ ac_cv_prog_uudecode_base64=no
+fi
+rm -f Test.uue
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_uudecode_base64" >&5
+$as_echo "$ac_cv_prog_uudecode_base64" >&6; }
+fi
+if test x$ac_cv_prog_uudecode_base64 != xyes; then
+ rm -f Test.class
+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: I have to compile Test.class from scratch" >&5
+$as_echo "$as_me: WARNING: I have to compile Test.class from scratch" >&2;}
+ if test x$ac_cv_prog_javac_works = xno; then
+ as_fn_error $? "Cannot compile java source. $JAVAC does not work properly" "$LINENO" 5
+ fi
+ if test x$ac_cv_prog_javac_works = x; then
+
+if test "x$JAVAPREFIX" = x; then :
+ test "x$JAVAC" = x && for ac_prog in "gcj -C" guavac jikes javac
+do
+ # Extract the first word of "$ac_prog", so it can be a program name with args.
+set dummy $ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_JAVAC+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ if test -n "$JAVAC"; then
+ ac_cv_prog_JAVAC="$JAVAC" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+ IFS=$as_save_IFS
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+ ac_cv_prog_JAVAC="$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+done
+ done
+IFS=$as_save_IFS
+
+fi
+fi
+JAVAC=$ac_cv_prog_JAVAC
+if test -n "$JAVAC"; then
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $JAVAC" >&5
+$as_echo "$JAVAC" >&6; }
+else
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+ test -n "$JAVAC" && break
+done
+
+else
+ test "x$JAVAC" = x && for ac_prog in "gcj -C" guavac jikes javac
+do
+ # Extract the first word of "$ac_prog", so it can be a program name with args.
+set dummy $ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_JAVAC+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ if test -n "$JAVAC"; then
+ ac_cv_prog_JAVAC="$JAVAC" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $JAVAPREFIX/bin
+do
+ IFS=$as_save_IFS
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+ ac_cv_prog_JAVAC="$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+done
+ done
+IFS=$as_save_IFS
+
+fi
+fi
+JAVAC=$ac_cv_prog_JAVAC
+if test -n "$JAVAC"; then
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $JAVAC" >&5
+$as_echo "$JAVAC" >&6; }
+else
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+ test -n "$JAVAC" && break
+done
+
+fi
+test "x$JAVAC" = x && as_fn_error $? "no acceptable Java compiler found in \$PATH" "$LINENO" 5
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $JAVAC works" >&5
+$as_echo_n "checking if $JAVAC works... " >&6; }
+if ${ac_cv_prog_javac_works+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+
+JAVA_TEST=Test.java
+CLASS_TEST=Test.class
+cat << \EOF > $JAVA_TEST
+/* #line 6612 "configure" */
+public class Test {
+}
+EOF
+if { ac_try='$JAVAC $JAVACFLAGS $JAVA_TEST'
+ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_try\""; } >&5
+ (eval $ac_try) 2>&5
+ ac_status=$?
+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+ test $ac_status = 0; }; } >/dev/null 2>&1; then
+ ac_cv_prog_javac_works=yes
+else
+ as_fn_error $? "The Java compiler $JAVAC failed (see config.log, check the CLASSPATH?)" "$LINENO" 5
+ echo "configure: failed program was:" >&5
+ cat $JAVA_TEST >&5
+fi
+rm -f $JAVA_TEST $CLASS_TEST
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_javac_works" >&5
+$as_echo "$ac_cv_prog_javac_works" >&6; }
+
+
+ fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $JAVA works" >&5
+$as_echo_n "checking if $JAVA works... " >&6; }
+if ${ac_cv_prog_java_works+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+
+JAVA_TEST=Test.java
+CLASS_TEST=Test.class
+TEST=Test
+cat << \EOF > $JAVA_TEST
+/* [#]line 6647 "configure" */
+public class Test {
+public static void main (String args[]) {
+ System.exit (0);
+} }
+EOF
+if test x$ac_cv_prog_uudecode_base64 != xyes; then
+ if { ac_try='$JAVAC $JAVACFLAGS $JAVA_TEST'
+ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_try\""; } >&5
+ (eval $ac_try) 2>&5
+ ac_status=$?
+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+ test $ac_status = 0; }; } && test -s $CLASS_TEST; then
+ :
+ else
+ echo "configure: failed program was:" >&5
+ cat $JAVA_TEST >&5
+ as_fn_error $? "The Java compiler $JAVAC failed (see config.log, check the CLASSPATH?)" "$LINENO" 5
+ fi
+fi
+if { ac_try='$JAVA -classpath . $JAVAFLAGS $TEST'
+ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_try\""; } >&5
+ (eval $ac_try) 2>&5
+ ac_status=$?
+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+ test $ac_status = 0; }; } >/dev/null 2>&1; then
+ ac_cv_prog_java_works=yes
+else
+ echo "configure: failed program was:" >&5
+ cat $JAVA_TEST >&5
+ as_fn_error $? "The Java VM $JAVA failed (see config.log, check the CLASSPATH?)" "$LINENO" 5
+fi
+rm -fr $JAVA_TEST $CLASS_TEST Test.uue
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_java_works" >&5
+$as_echo "$ac_cv_prog_java_works" >&6; }
+
+
+
+
+if test "x$JAVAPREFIX" = x; then :
+ test "x$JAR" = x && for ac_prog in jar
+do
+ # Extract the first word of "$ac_prog", so it can be a program name with args.
+set dummy $ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_JAR+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ if test -n "$JAR"; then
+ ac_cv_prog_JAR="$JAR" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+ IFS=$as_save_IFS
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+ ac_cv_prog_JAR="$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+done
+ done
+IFS=$as_save_IFS
+
+fi
+fi
+JAR=$ac_cv_prog_JAR
+if test -n "$JAR"; then
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $JAR" >&5
+$as_echo "$JAR" >&6; }
+else
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+ test -n "$JAR" && break
+done
+
+else
+ test "x$JAR" = x && for ac_prog in jar
+do
+ # Extract the first word of "$ac_prog", so it can be a program name with args.
+set dummy $ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_JAR+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ if test -n "$JAR"; then
+ ac_cv_prog_JAR="$JAR" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $JAVAPREFIX/bin
+do
+ IFS=$as_save_IFS
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+ ac_cv_prog_JAR="$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+done
+ done
+IFS=$as_save_IFS
+
+fi
+fi
+JAR=$ac_cv_prog_JAR
+if test -n "$JAR"; then
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $JAR" >&5
+$as_echo "$JAR" >&6; }
+else
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+ test -n "$JAR" && break
+done
+
+fi
+test "x$JAR" = x && as_fn_error $? "no acceptable jar program found in \$PATH" "$LINENO" 5
+
+
+if test "x$JAVAPREFIX" = x; then :
+ test "x$JAVADOC" = x && for ac_prog in javadoc
+do
+ # Extract the first word of "$ac_prog", so it can be a program name with args.
+set dummy $ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_JAVADOC+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ if test -n "$JAVADOC"; then
+ ac_cv_prog_JAVADOC="$JAVADOC" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+ IFS=$as_save_IFS
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+ ac_cv_prog_JAVADOC="$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+done
+ done
+IFS=$as_save_IFS
+
+fi
+fi
+JAVADOC=$ac_cv_prog_JAVADOC
+if test -n "$JAVADOC"; then
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $JAVADOC" >&5
+$as_echo "$JAVADOC" >&6; }
+else
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+ test -n "$JAVADOC" && break
+done
+
+else
+ test "x$JAVADOC" = x && for ac_prog in javadoc
+do
+ # Extract the first word of "$ac_prog", so it can be a program name with args.
+set dummy $ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_JAVADOC+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ if test -n "$JAVADOC"; then
+ ac_cv_prog_JAVADOC="$JAVADOC" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $JAVAPREFIX/bin
+do
+ IFS=$as_save_IFS
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+ ac_cv_prog_JAVADOC="$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+done
+ done
+IFS=$as_save_IFS
+
+fi
+fi
+JAVADOC=$ac_cv_prog_JAVADOC
+if test -n "$JAVADOC"; then
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $JAVADOC" >&5
+$as_echo "$JAVADOC" >&6; }
+else
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+ test -n "$JAVADOC" && break
+done
+
+fi
+test "x$JAVADOC" = x && as_fn_error $? "no acceptable javadoc generator found in \$PATH" "$LINENO" 5
+
+ ## Find the include directories needed for building JNI code
+
+
+JNI_INCLUDE_DIRS=""
+
+if test "x$JAVA_HOME" != x; then
+ _JTOPDIR="$JAVA_HOME"
+else
+ if test "x$JAVAC" = x; then
+ JAVAC=javac
+ fi
+ # Extract the first word of "$JAVAC", so it can be a program name with args.
+set dummy $JAVAC; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_path__ACJNI_JAVAC+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ case $_ACJNI_JAVAC in
+ [\\/]* | ?:[\\/]*)
+ ac_cv_path__ACJNI_JAVAC="$_ACJNI_JAVAC" # Let the user override the test with a path.
+ ;;
+ *)
+ as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+ IFS=$as_save_IFS
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+ ac_cv_path__ACJNI_JAVAC="$as_dir/$ac_word$ac_exec_ext"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+done
+ done
+IFS=$as_save_IFS
+
+ test -z "$ac_cv_path__ACJNI_JAVAC" && ac_cv_path__ACJNI_JAVAC="no"
+ ;;
+esac
+fi
+_ACJNI_JAVAC=$ac_cv_path__ACJNI_JAVAC
+if test -n "$_ACJNI_JAVAC"; then
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $_ACJNI_JAVAC" >&5
+$as_echo "$_ACJNI_JAVAC" >&6; }
+else
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+ if test "x$_ACJNI_JAVAC" = xno; then
+ as_fn_error $? "cannot find JDK; try setting \$JAVAC or \$JAVA_HOME" "$LINENO" 5
+ fi
+
+# find the include directory relative to the javac executable
+_cur=""$_ACJNI_JAVAC""
+while ls -ld "$_cur" 2>/dev/null | grep " -> " >/dev/null; do
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking symlink for $_cur" >&5
+$as_echo_n "checking symlink for $_cur... " >&6; }
+ _slink=`ls -ld "$_cur" | sed 's/.* -> //'`
+ case "$_slink" in
+ /*) _cur="$_slink";;
+ # 'X' avoids triggering unwanted echo options.
+ *) _cur=`echo "X$_cur" | sed -e 's/^X//' -e 's:[^/]*$::'`"$_slink";;
+ esac
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $_cur" >&5
+$as_echo "$_cur" >&6; }
+done
+_ACJNI_FOLLOWED="$_cur"
+
+ _JTOPDIR=`echo "$_ACJNI_FOLLOWED" | sed -e 's://*:/:g' -e 's:/[^/]*$::'`
+fi
+
+case "$host_os" in
+ darwin*) # Apple JDK is at /System location and has headers symlinked elsewhere
+ case "$_JTOPDIR" in
+ /System/Library/Frameworks/JavaVM.framework/*)
+ _JTOPDIR=`echo "$_JTOPDIR" | sed -e 's:/[^/]*$::'`
+ _JINC="$_JTOPDIR/Headers";;
+ *) _JINC="$_JTOPDIR/include";;
+ esac;;
+ *) _JINC="$_JTOPDIR/include";;
+esac
+$as_echo "$as_me:${as_lineno-$LINENO}: _JTOPDIR=$_JTOPDIR" >&5
+$as_echo "$as_me:${as_lineno-$LINENO}: _JINC=$_JINC" >&5
+
+# On Mac OS X 10.6.4, jni.h is a symlink:
+# /System/Library/Frameworks/JavaVM.framework/Versions/Current/Headers/jni.h
+# -> ../../CurrentJDK/Headers/jni.h.
+as_ac_File=`$as_echo "ac_cv_file_$_JINC/jni.h" | $as_tr_sh`
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $_JINC/jni.h" >&5
+$as_echo_n "checking for $_JINC/jni.h... " >&6; }
+if eval \${$as_ac_File+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ test "$cross_compiling" = yes &&
+ as_fn_error $? "cannot check for file existence when cross compiling" "$LINENO" 5
+if test -r "$_JINC/jni.h"; then
+ eval "$as_ac_File=yes"
+else
+ eval "$as_ac_File=no"
+fi
+fi
+eval ac_res=\$$as_ac_File
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
+$as_echo "$ac_res" >&6; }
+if eval test \"x\$"$as_ac_File"\" = x"yes"; then :
+ JNI_INCLUDE_DIRS="$JNI_INCLUDE_DIRS $_JINC"
+else
+ _JTOPDIR=`echo "$_JTOPDIR" | sed -e 's:/[^/]*$::'`
+ as_ac_File=`$as_echo "ac_cv_file_$_JTOPDIR/include/jni.h" | $as_tr_sh`
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $_JTOPDIR/include/jni.h" >&5
+$as_echo_n "checking for $_JTOPDIR/include/jni.h... " >&6; }
+if eval \${$as_ac_File+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ test "$cross_compiling" = yes &&
+ as_fn_error $? "cannot check for file existence when cross compiling" "$LINENO" 5
+if test -r "$_JTOPDIR/include/jni.h"; then
+ eval "$as_ac_File=yes"
+else
+ eval "$as_ac_File=no"
+fi
+fi
+eval ac_res=\$$as_ac_File
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
+$as_echo "$ac_res" >&6; }
+if eval test \"x\$"$as_ac_File"\" = x"yes"; then :
+ JNI_INCLUDE_DIRS="$JNI_INCLUDE_DIRS $_JTOPDIR/include"
+else
+ as_fn_error $? "cannot find JDK header files" "$LINENO" 5
+fi
+
+
+fi
+
+
+# get the likely subdirectories for system specific java includes
+case "$host_os" in
+bsdi*) _JNI_INC_SUBDIRS="bsdos";;
+freebsd*) _JNI_INC_SUBDIRS="freebsd";;
+darwin*) _JNI_INC_SUBDIRS="darwin";;
+linux*) _JNI_INC_SUBDIRS="linux genunix";;
+osf*) _JNI_INC_SUBDIRS="alpha";;
+solaris*) _JNI_INC_SUBDIRS="solaris";;
+mingw*) _JNI_INC_SUBDIRS="win32";;
+cygwin*) _JNI_INC_SUBDIRS="win32";;
+*) _JNI_INC_SUBDIRS="genunix";;
+esac
+
+# add any subdirectories that are present
+for JINCSUBDIR in $_JNI_INC_SUBDIRS
+do
+ if test -d "$_JTOPDIR/include/$JINCSUBDIR"; then
+ JNI_INCLUDE_DIRS="$JNI_INCLUDE_DIRS $_JTOPDIR/include/$JINCSUBDIR"
+ fi
+done
+
+ for JNI_INCLUDE_DIR in $JNI_INCLUDE_DIRS
+ do
+ JNIFLAGS="$JNIFLAGS -I$JNI_INCLUDE_DIR"
+ done
+ ## Find junit for testing the JNI code
+
+if test "x$CLASSPATH" = x; then
+ echo "You have no CLASSPATH, I hope it is good"
+else
+ echo "You have CLASSPATH $CLASSPATH, hope it is correct"
+fi
+
+ CLASSPATH_ENV=$H4_CLASSPATH
+
+if ${ac_cv_prog_JUNIT+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+
+
+
+ac_var_name=`echo junit.textui.TestRunner | sed 's/\./_/g'`
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for junit.textui.TestRunner class" >&5
+$as_echo_n "checking for junit.textui.TestRunner class... " >&6; }
+if eval \${ax_cv_class_$ac_var_name+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+
+if test x$ac_cv_prog_uudecode_base64 = xyes; then
+cat << \EOF > Test.uue
+begin-base64 644 Test.class
+yv66vgADAC0AKQcAAgEABFRlc3QHAAQBABBqYXZhL2xhbmcvT2JqZWN0AQAE
+bWFpbgEAFihbTGphdmEvbGFuZy9TdHJpbmc7KVYBAARDb2RlAQAPTGluZU51
+bWJlclRhYmxlDAAKAAsBAANlcnIBABVMamF2YS9pby9QcmludFN0cmVhbTsJ
+AA0ACQcADgEAEGphdmEvbGFuZy9TeXN0ZW0IABABABBNaXNzaW5nIGFyZ3Vt
+ZW50DAASABMBAAdwcmludGxuAQAVKExqYXZhL2xhbmcvU3RyaW5nOylWCgAV
+ABEHABYBABNqYXZhL2lvL1ByaW50U3RyZWFtDAAYABkBAARleGl0AQAEKEkp
+VgoADQAXDAAcAB0BAAdmb3JOYW1lAQAlKExqYXZhL2xhbmcvU3RyaW5nOylM
+amF2YS9sYW5nL0NsYXNzOwoAHwAbBwAgAQAPamF2YS9sYW5nL0NsYXNzBwAi
+AQAgamF2YS9sYW5nL0NsYXNzTm90Rm91bmRFeGNlcHRpb24BAAY8aW5pdD4B
+AAMoKVYMACMAJAoAAwAlAQAKU291cmNlRmlsZQEACVRlc3QuamF2YQAhAAEA
+AwAAAAAAAgAJAAUABgABAAcAAABtAAMAAwAAACkqvgSiABCyAAwSD7YAFBBN
+uAAaKgMyuAAeTKcACE0EuAAaAUwDuAAasQABABMAGgAdACEAAQAIAAAAKgAK
+AAAACgAAAAsABgANAA4ADgATABAAEwASAB4AFgAiABgAJAAZACgAGgABACMA
+JAABAAcAAAAhAAEAAQAAAAUqtwAmsQAAAAEACAAAAAoAAgAAAAQABAAEAAEA
+JwAAAAIAKA==
+====
+EOF
+ if $UUDECODE Test.uue; then
+ :
+ else
+ echo "configure: 7077: uudecode had trouble decoding base 64 file 'Test.uue'" >&5
+ echo "configure: failed file was:" >&5
+ cat Test.uue >&5
+ ac_cv_prog_uudecode_base64=no
+ fi
+ rm -f Test.uue
+ if { ac_try='$JAVA $JAVAFLAGS Test junit.textui.TestRunner'
+ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_try\""; } >&5
+ (eval $ac_try) 2>&5
+ ac_status=$?
+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+ test $ac_status = 0; }; } >/dev/null 2>&1; then
+ eval "ac_cv_class_$ac_var_name=yes"
+ else
+ eval "ac_cv_class_$ac_var_name=no"
+ fi
+ rm -f Test.class
+else
+
+cat << \EOF > Test.java
+/* #line 7097 "configure" */
+import junit.textui.TestRunner;
+public class Test {
+
+}
+EOF
+if { ac_try='$JAVAC $JAVACFLAGS Test.java'
+ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_try\""; } >&5
+ (eval $ac_try) 2>&5
+ ac_status=$?
+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+ test $ac_status = 0; }; } && test -s Test.class
+then
+ eval "ac_cv_class_$ac_var_name=yes"
+else
+ echo "configure: failed program was:" >&5
+ cat Test.java >&5
+ rm -fr Test.java Test.class
+ eval "ac_cv_class_$ac_var_name=no"
+fi
+rm -fr Test.java Test.class
+fi
+eval "ac_var_val=$`eval echo ac_cv_class_$ac_var_name`"
+eval "HAVE_$ac_var_name=$`echo ac_cv_class_$ac_var_val`"
+HAVE_LAST_CLASS=$ac_var_val
+if test x$ac_var_val = xyes; then
+ :
+else
+ :
+fi
+
+fi
+
+eval "ac_var_val=$`eval echo ac_cv_class_$ac_var_name`"
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_var_val" >&5
+$as_echo "$ac_var_val" >&6; }
+
+if test x"`eval 'echo $ac_cv_class_junit_textui_TestRunner'`" != xno ; then
+ ac_cv_prog_JUNIT='$(CLASSPATH_ENV) $(JAVA) $(JAVAFLAGS) junit.textui.TestRunner'
+fi
+fi
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for junit" >&5
+$as_echo_n "checking for junit... " >&6; }
+if test x"`eval 'echo $ac_cv_prog_JUNIT'`" != x ; then
+ JUNIT="$ac_cv_prog_JUNIT"
+ JAVA_JUNIT='$(JAVA_JUNIT)'
+ TESTS_JUNIT='$(TESTS_JUNIT)'
+else
+ JUNIT=
+ JAVA_JUNIT=
+ TESTS_JUNIT=
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $JAVA_JUNIT" >&5
+$as_echo "$JAVA_JUNIT" >&6; }
+
+
+
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for JAVA_HOME" >&5
+$as_echo_n "checking for JAVA_HOME... " >&6; }
+# We used a fake loop so that we can use "break" to exit when the result
+# is found.
+while true
+do
+ # If the user defined JAVA_HOME, don't touch it.
+ test "${JAVA_HOME+set}" = set && break
+
+ # On Mac OS X 10.5 and following, run /usr/libexec/java_home to get
+ # the value of JAVA_HOME to use.
+ # (http://developer.apple.com/library/mac/#qa/qa2001/qa1170.html).
+ JAVA_HOME=`/usr/libexec/java_home 2>/dev/null`
+ test x"$JAVA_HOME" != x && break
+
+ # See if we can find the java executable, and compute from there.
+ TRY_JAVA_HOME=`ls -dr /usr/java/* 2> /dev/null | head -n 1`
+ if test x$TRY_JAVA_HOME != x; then
+ PATH=$PATH:$TRY_JAVA_HOME/bin
+ fi
+ # Extract the first word of "java", so it can be a program name with args.
+set dummy java; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_path_JAVA_PATH_NAME+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ case $JAVA_PATH_NAME in
+ [\\/]* | ?:[\\/]*)
+ ac_cv_path_JAVA_PATH_NAME="$JAVA_PATH_NAME" # Let the user override the test with a path.
+ ;;
+ *)
+ as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+ IFS=$as_save_IFS
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+ ac_cv_path_JAVA_PATH_NAME="$as_dir/$ac_word$ac_exec_ext"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+done
+ done
+IFS=$as_save_IFS
+
+ ;;
+esac
+fi
+JAVA_PATH_NAME=$ac_cv_path_JAVA_PATH_NAME
+if test -n "$JAVA_PATH_NAME"; then
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $JAVA_PATH_NAME" >&5
+$as_echo "$JAVA_PATH_NAME" >&6; }
+else
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+ if test "x$JAVA_PATH_NAME" != x; then
+ JAVA_HOME=`echo $JAVA_PATH_NAME | sed "s/\(.*\)[/]bin[/]java.*/\1/"`
+ break
+ fi
+
+ { $as_echo "$as_me:${as_lineno-$LINENO}: Could not compute JAVA_HOME" >&5
+$as_echo "$as_me: Could not compute JAVA_HOME" >&6;}
+ break
+done
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $JAVA_HOME" >&5
+$as_echo "$JAVA_HOME" >&6; }
+
+
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
+$as_echo "yes" >&6; }
+ else
+ as_fn_error $? "Java requires shared libraries to be built" "$LINENO" 5
+ HDF_JAVA="no"
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+ fi
+else
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+ if test "X$HDF_JAVA" = "Xyes"; then
+ BUILD_JAVA_CONDITIONAL_TRUE=
+ BUILD_JAVA_CONDITIONAL_FALSE='#'
+else
+ BUILD_JAVA_CONDITIONAL_TRUE='#'
+ BUILD_JAVA_CONDITIONAL_FALSE=
+fi
+
+
## -------------------------------------------------------------------------
## Build static libraries by default. Furthermore, fortran shared libraries
## are unsupported. Disallow a user from enabling both shared libraries and
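
The generated block above comes from the new AX_CHECK_CLASSPATH/AX_CHECK_JUNIT/AX_CHECK_JAVA_HOME macros: configure reports the current CLASSPATH, probes it for the junit.textui.TestRunner class, accumulates JNIFLAGS from the detected JNI include directories, and falls back to /usr/libexec/java_home or the java found on PATH to compute JAVA_HOME. A minimal sketch of priming that probe before configuring, assuming a junit jar under /usr/share/java (the jar path is illustrative, not part of the patch):

    # assumption: the junit.jar location varies by distribution
    export CLASSPATH=".:/usr/share/java/junit.jar"
    ./configure --enable-shared --enable-java
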
@@ -20260,7 +21373,7 @@ Usage: $0 [OPTIONS]
Report bugs to <bug-libtool at gnu.org>."
lt_cl_version="\
-HDF config.lt 4.2.11
+HDF config.lt 4.2.12
configured by $0, generated by GNU Autoconf 2.69.
Copyright (C) 2011 Free Software Foundation, Inc.
@@ -23752,7 +24865,10 @@ $as_echo "#define NO_DEPRECATED_SYMBOLS 1" >>confdefs.h
;;
esac
-ac_config_files="$ac_config_files Makefile libhdf4.settings hdf/Makefile hdf/examples/Makefile hdf/fortran/Makefile hdf/fortran/examples/Makefile hdf/src/Makefile hdf/test/Makefile hdf/util/Makefile hdf/util/h4cc hdf/util/h4fc hdf/util/h4redeploy hdf/util/testutil.sh man/Makefile mfhdf/fortran/ftest.f mfhdf/fortran/jackets.c mfhdf/fortran/netcdf.inc mfhdf/libsrc/netcdf.h mfhdf/Makefile mfhdf/dumper/Makefile mfhdf/dumper/testhdp.sh mfhdf/examples/Makefile mfhdf/examples/testexamples.sh mf [...]
+ac_config_files="$ac_config_files Makefile libhdf4.settings hdf/Makefile hdf/examples/Makefile hdf/fortran/Makefile hdf/fortran/examples/Makefile hdf/src/Makefile hdf/test/Makefile hdf/util/Makefile hdf/util/h4cc hdf/util/h4fc hdf/util/h4redeploy hdf/util/testutil.sh man/Makefile mfhdf/fortran/ftest.f mfhdf/fortran/jackets.c mfhdf/fortran/netcdf.inc mfhdf/libsrc/netcdf.h mfhdf/Makefile mfhdf/dumper/Makefile mfhdf/dumper/testhdp.sh mfhdf/examples/Makefile mfhdf/examples/testexamples.sh mf [...]
+
+
+ac_config_commands="$ac_config_commands .classes"
cat >confcache <<\_ACEOF
@@ -23904,6 +25020,10 @@ if test -z "${HDF_BUILD_FORTRAN_TRUE}" && test -z "${HDF_BUILD_FORTRAN_FALSE}";
as_fn_error $? "conditional \"HDF_BUILD_FORTRAN\" was never defined.
Usually this means the macro was only invoked conditionally." "$LINENO" 5
fi
+if test -z "${BUILD_JAVA_CONDITIONAL_TRUE}" && test -z "${BUILD_JAVA_CONDITIONAL_FALSE}"; then
+ as_fn_error $? "conditional \"BUILD_JAVA_CONDITIONAL\" was never defined.
+Usually this means the macro was only invoked conditionally." "$LINENO" 5
+fi
if test -z "${BUILD_SHARED_SZIP_CONDITIONAL_TRUE}" && test -z "${BUILD_SHARED_SZIP_CONDITIONAL_FALSE}"; then
as_fn_error $? "conditional \"BUILD_SHARED_SZIP_CONDITIONAL\" was never defined.
Usually this means the macro was only invoked conditionally." "$LINENO" 5
@@ -24314,7 +25434,7 @@ cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
# report actual input values of CONFIG_FILES etc. instead of their
# values after options handling.
ac_log="
-This file was extended by HDF $as_me 4.2.11, which was
+This file was extended by HDF $as_me 4.2.12, which was
generated by GNU Autoconf 2.69. Invocation command line was
CONFIG_FILES = $CONFIG_FILES
@@ -24380,7 +25500,7 @@ _ACEOF
cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`"
ac_cs_version="\\
-HDF config.status 4.2.11
+HDF config.status 4.2.12
configured by $0, generated by GNU Autoconf 2.69,
with options \\"\$ac_cs_config\\"
@@ -24962,6 +26082,9 @@ fi
ac_aux_dir='$ac_aux_dir'
+$MKDIR_P java/src/.classes;
+ $MKDIR_P java/test/.classes;
+ $MKDIR_P java/examples/.classes
_ACEOF
@@ -25017,6 +26140,13 @@ do
"mfhdf/test/Makefile") CONFIG_FILES="$CONFIG_FILES mfhdf/test/Makefile" ;;
"mfhdf/test/testmfhdf.sh") CONFIG_FILES="$CONFIG_FILES mfhdf/test/testmfhdf.sh" ;;
"mfhdf/xdr/Makefile") CONFIG_FILES="$CONFIG_FILES mfhdf/xdr/Makefile" ;;
+ "java/Makefile") CONFIG_FILES="$CONFIG_FILES java/Makefile" ;;
+ "java/src/Makefile") CONFIG_FILES="$CONFIG_FILES java/src/Makefile" ;;
+ "java/src/jni/Makefile") CONFIG_FILES="$CONFIG_FILES java/src/jni/Makefile" ;;
+ "java/test/Makefile") CONFIG_FILES="$CONFIG_FILES java/test/Makefile" ;;
+ "java/test/junit.sh") CONFIG_FILES="$CONFIG_FILES java/test/junit.sh" ;;
+ "java/examples/Makefile") CONFIG_FILES="$CONFIG_FILES java/examples/Makefile" ;;
+ ".classes") CONFIG_COMMANDS="$CONFIG_COMMANDS .classes" ;;
*) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5;;
esac
diff --git a/configure.ac b/configure.ac
index 72ec872..9c7e60e 100644
--- a/configure.ac
+++ b/configure.ac
@@ -11,9 +11,9 @@
## of the source code distribution tree; Copyright.html can be found at
## http://hdfgroup.org/products/hdf4/doc/Copyright.html. If you do not have
## access to either file, you may request a copy from help at hdfgroup.org.
-##
##
-AC_REVISION($Id: configure.ac 6194 2015-02-05 14:17:12Z bmribler $)
+##
+AC_REVISION($Id: configure.ac 6473 2016-06-24 21:38:14Z bmribler $)
## ======================================================================
## Initialize configure.
@@ -25,7 +25,7 @@ AC_REVISION($Id: configure.ac 6194 2015-02-05 14:17:12Z bmribler $)
## NOTE: Do not forget to change the version number here when we do a
## release!!!
##
-AC_INIT([HDF], [4.2.11], [help at hdfgroup.org])
+AC_INIT([HDF], [4.2.12], [help at hdfgroup.org])
AC_CONFIG_SRCDIR([hdf/src/atom.c])
AC_CONFIG_AUX_DIR([bin])
AC_CONFIG_HEADER([hdf/src/h4config.h])
@@ -161,12 +161,12 @@ if test "X$host_config" != "Xnone"; then
fi
## ======================================================================
-## Checks for NetCDF-2.3.2 support
+## Checks for NetCDF-2.3.2 support
## ======================================================================
# We disable Fortran netCDF APIs and their testing when --disable-netcdf is used.
-# Let's define a proper variable to be used in mfhdf/testfortran.sh.in to run
-# the netCDF Fortran APIs test program "ftest".
+# Let's define a proper variable to be used in mfhdf/testfortran.sh.in to run
+# the netCDF Fortran APIs test program "ftest".
AC_SUBST(TEST_FORTRAN_NETCDF) TEST_FORTRAN_NETCDF="yes"
AC_ARG_ENABLE([netcdf],
@@ -220,6 +220,66 @@ esac
AM_CONDITIONAL([HDF_BUILD_FORTRAN], [test "X$BUILD_FORTRAN" = "Xyes"])
AC_SUBST([BUILD_FORTRAN])
+## ----------------------------------------------------------------------
+## Check if they would like the Java native interface (JNI) compiled
+##
+AC_SUBST([JNIFLAGS])
+AC_SUBST([H4_JAVACFLAGS])
+AC_SUBST([H4_JAVAFLAGS])
+
+## This needs to be exposed for the library info file even if Java is disabled.
+AC_SUBST([HDF_JAVA])
+
+## Default is no Java
+HDF_JAVA=no
+
+AC_SUBST([H4_CLASSPATH]) H4_CLASSPATH=""
+AC_MSG_CHECKING([if Java JNI interface enabled])
+
+AC_ARG_ENABLE([java],
+ [AS_HELP_STRING([--enable-java],
+ [Compile the Java JNI interface [default=no]])],
+ [HDF_JAVA=$enableval])
+
+if test "X$HDF_JAVA" = "Xyes"; then
+ if test "X${enable_shared}" != "Xno"; then
+ echo "yes"
+ if test "X$CLASSPATH" = "X"; then
+ H4_CLASSPATH=".:$srcdir/java/lib"
+ else
+ H4_CLASSPATH=".:$CLASSPATH:$srcdir/java/lib"
+ fi
+ ## Checks for programs.
+ AX_JAVA_OPTIONS
+ H4_JAVACFLAGS=$JAVACFLAGS
+ H4_JAVAFLAGS=$JAVAFLAGS
+ AX_PROG_JAVAC
+ AX_PROG_JAVA
+ AX_PROG_JAR
+ AX_PROG_JAVADOC
+ ## Find the include directories needed for building JNI code
+ AX_JNI_INCLUDE_DIR()
+ for JNI_INCLUDE_DIR in $JNI_INCLUDE_DIRS
+ do
+ JNIFLAGS="$JNIFLAGS -I$JNI_INCLUDE_DIR"
+ done
+ ## Find junit for testing the JNI code
+ AX_CHECK_CLASSPATH()
+ CLASSPATH_ENV=$H4_CLASSPATH
+ AX_CHECK_JUNIT()
+ AX_CHECK_JAVA_HOME
+
+ AC_MSG_RESULT([yes])
+ else
+ AC_MSG_ERROR([Java requires shared libraries to be built])
+ HDF_JAVA="no"
+ AC_MSG_RESULT([no])
+ fi
+else
+ AC_MSG_RESULT([no])
+fi
+AM_CONDITIONAL([BUILD_JAVA_CONDITIONAL], [test "X$HDF_JAVA" = "Xyes"])
+
## -------------------------------------------------------------------------
## Build static libraries by default. Furthermore, fortran shared libraries
## are unsupported. Disallow a user from enabling both shared libraries and
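
In short, --enable-java only takes effect together with shared libraries; configure otherwise aborts with "Java requires shared libraries to be built". A minimal sketch of the intended invocation, assuming the Java toolchain (javac, java, jar, javadoc) is on PATH:

    ./configure --enable-shared --enable-java
    make
    make check   # the configured java/test/junit.sh harness is expected to run here when junit was detected
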
@@ -533,7 +593,7 @@ case "$withval" in
;;
esac
-## Check to see if SZIP has encoder
+## Check to see if SZIP has encoder
if test "X$HAVE_SZIP" = "Xyes" -a "x$HAVE_SZLIB_H" = "xyes"; then
## SZLIB library is available. Check if it can encode.
AC_MSG_CHECKING([for szlib encoder])
@@ -894,7 +954,7 @@ fi
## ----------------------------------------------------------------------
-##
+##
## If --enable-static-exec and are specified together, there will be ld failures for
## "attempted static link of dynamic object" when the tools are built. This check
## will prevent that error during configure instead. It could go with the other
@@ -939,7 +999,7 @@ if test -n "$fc_version_info"; then
fi
## This part doesn't work yet since HDF4 config files do not contain
-## information for fortran_vendor and fortran_version.
+## information for fortran_vendor and fortran_version.
## Needs to be fixed EIP 2010-01-21
## if test -n "$fortran_vendor" && test -n "$fortran_version"; then
## F77_VERSION="$F77_VERSION ($fortran_vendor-$fortran_version)"
@@ -983,10 +1043,10 @@ AC_CONFIG_FILES([Makefile
hdf/util/h4redeploy
hdf/util/testutil.sh
man/Makefile
- mfhdf/fortran/ftest.f
- mfhdf/fortran/jackets.c
- mfhdf/fortran/netcdf.inc
- mfhdf/libsrc/netcdf.h
+ mfhdf/fortran/ftest.f
+ mfhdf/fortran/jackets.c
+ mfhdf/fortran/netcdf.inc
+ mfhdf/libsrc/netcdf.h
mfhdf/Makefile
mfhdf/dumper/Makefile
mfhdf/dumper/testhdp.sh
@@ -1010,7 +1070,17 @@ AC_CONFIG_FILES([Makefile
mfhdf/nctest/Makefile
mfhdf/test/Makefile
mfhdf/test/testmfhdf.sh
- mfhdf/xdr/Makefile])
+ mfhdf/xdr/Makefile
+ java/Makefile
+ java/src/Makefile
+ java/src/jni/Makefile
+ java/test/Makefile
+ java/test/junit.sh
+ java/examples/Makefile])
+
+AC_CONFIG_COMMANDS([.classes], [], [$MKDIR_P java/src/.classes;
+ $MKDIR_P java/test/.classes;
+ $MKDIR_P java/examples/.classes])
AC_OUTPUT
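
The new AC_CONFIG_COMMANDS([.classes]) entry carries no commands of its own; its init code simply pre-creates the class output directories when config.status runs. Assuming $MKDIR_P resolves to the usual `mkdir -p`, the effect from the build tree is just:

    mkdir -p java/src/.classes java/test/.classes java/examples/.classes
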
diff --git a/hdf/CMakeLists.txt b/hdf/CMakeLists.txt
index 85e8bb6..1b4b5b7 100644
--- a/hdf/CMakeLists.txt
+++ b/hdf/CMakeLists.txt
@@ -1,4 +1,4 @@
-cmake_minimum_required (VERSION 2.8.10)
+cmake_minimum_required (VERSION 3.1.0)
PROJECT (HDF4_HDF)
#-----------------------------------------------------------------------------
diff --git a/hdf/Makefile.in b/hdf/Makefile.in
index 8a4e0d1..fc6b9d4 100644
--- a/hdf/Makefile.in
+++ b/hdf/Makefile.in
@@ -81,7 +81,20 @@ DIST_COMMON = $(top_srcdir)/config/commence.am $(srcdir)/Makefile.in \
$(srcdir)/Makefile.am $(top_srcdir)/bin/mkinstalldirs
subdir = hdf
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
-am__aclocal_m4_deps = $(top_srcdir)/configure.ac
+am__aclocal_m4_deps = $(top_srcdir)/m4/ax_check_class.m4 \
+ $(top_srcdir)/m4/ax_check_classpath.m4 \
+ $(top_srcdir)/m4/ax_check_java_home.m4 \
+ $(top_srcdir)/m4/ax_check_junit.m4 \
+ $(top_srcdir)/m4/ax_java_options.m4 \
+ $(top_srcdir)/m4/ax_jni_include_dir.m4 \
+ $(top_srcdir)/m4/ax_prog_jar.m4 \
+ $(top_srcdir)/m4/ax_prog_java.m4 \
+ $(top_srcdir)/m4/ax_prog_java_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javac.m4 \
+ $(top_srcdir)/m4/ax_prog_javac_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javadoc.m4 \
+ $(top_srcdir)/m4/ax_try_compile_java.m4 \
+ $(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
mkinstalldirs = $(SHELL) $(top_srcdir)/bin/mkinstalldirs
@@ -210,12 +223,27 @@ FFLAGS = @FFLAGS@
FGREP = @FGREP@
FLIBS = @FLIBS@
GREP = @GREP@
+H4_CLASSPATH = @H4_CLASSPATH@
+H4_JAVACFLAGS = @H4_JAVACFLAGS@
+H4_JAVAFLAGS = @H4_JAVAFLAGS@
H4_VERSION = @H4_VERSION@
+HDF_JAVA = @HDF_JAVA@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
INSTALL_PROGRAM = @INSTALL_PROGRAM@
INSTALL_SCRIPT = @INSTALL_SCRIPT@
INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+JAR = @JAR@
+JAVA = @JAVA@
+JAVAC = @JAVAC@
+JAVACFLAGS = @JAVACFLAGS@
+JAVADOC = @JAVADOC@
+JAVAFLAGS = @JAVAFLAGS@
+JAVAPREFIX = @JAVAPREFIX@
+JAVA_JUNIT = @JAVA_JUNIT@
+JAVA_PATH_NAME = @JAVA_PATH_NAME@
+JNIFLAGS = @JNIFLAGS@
+JUNIT = @JUNIT@
LD = @LD@
LDFLAGS = @LDFLAGS@
LEX = @LEX@
@@ -258,11 +286,14 @@ STRIP = @STRIP@
SZIP_HAS_ENCODER = @SZIP_HAS_ENCODER@
SZIP_INFO = @SZIP_INFO@
TBL = @TBL@
+TESTS_JUNIT = @TESTS_JUNIT@
TEST_FORTRAN_NETCDF = @TEST_FORTRAN_NETCDF@
UNAME_INFO = @UNAME_INFO@
USE_COMP_SZIP = @USE_COMP_SZIP@
+UUDECODE = @UUDECODE@
VERSION = @VERSION@
YACC = @YACC@
+_ACJNI_JAVAC = @_ACJNI_JAVAC@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/hdf/examples/CMakeLists.txt b/hdf/examples/CMakeLists.txt
index 6f2128b..defffb8 100644
--- a/hdf/examples/CMakeLists.txt
+++ b/hdf/examples/CMakeLists.txt
@@ -1,4 +1,4 @@
-cmake_minimum_required (VERSION 2.8.10)
+cmake_minimum_required (VERSION 3.1.0)
PROJECT (HDF4_HDF_EXAMPLES C CXX)
#-----------------------------------------------------------------------------
@@ -36,15 +36,15 @@ set (examples
)
foreach (example ${examples})
- ADD_EXECUTABLE (hdf_${example} ${HDF4_HDF_EXAMPLES_SOURCE_DIR}/${example}.c)
- TARGET_NAMING (hdf_${example} ${LIB_TYPE})
- TARGET_C_PROPERTIES (hdf_${example} " " " ")
+ add_executable (hdf_${example} ${HDF4_HDF_EXAMPLES_SOURCE_DIR}/${example}.c)
+ TARGET_NAMING (hdf_${example} STATIC)
+ TARGET_C_PROPERTIES (hdf_${example} STATIC " " " ")
target_link_libraries (hdf_${example} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
endforeach (example ${examples})
-ADD_EXECUTABLE (hdf_VG_add_sds_to_vgroup ${HDF4_HDF_EXAMPLES_SOURCE_DIR}/VG_add_sds_to_vgroup.c)
-TARGET_NAMING (hdf_VG_add_sds_to_vgroup ${LIB_TYPE})
-TARGET_C_PROPERTIES (hdf_VG_add_sds_to_vgroup " " " ")
+add_executable (hdf_VG_add_sds_to_vgroup ${HDF4_HDF_EXAMPLES_SOURCE_DIR}/VG_add_sds_to_vgroup.c)
+TARGET_NAMING (hdf_VG_add_sds_to_vgroup STATIC)
+TARGET_C_PROPERTIES (hdf_VG_add_sds_to_vgroup STATIC " " " ")
target_link_libraries (hdf_VG_add_sds_to_vgroup ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
if (BUILD_TESTING)
diff --git a/hdf/examples/CMakeTests.cmake b/hdf/examples/CMakeTests.cmake
index 83bd69a..49cad62 100644
--- a/hdf/examples/CMakeTests.cmake
+++ b/hdf/examples/CMakeTests.cmake
@@ -27,19 +27,19 @@ endif (NOT "${last_test}" STREQUAL "")
set (last_test "HDF_EXAMPLES-clearall-objects")
foreach (example ${examples})
- add_test (NAME hdftest_${example} COMMAND $<TARGET_FILE:hdf_${example}>)
+ add_test (NAME HDF_EXAMPLES-${example} COMMAND $<TARGET_FILE:hdf_${example}>)
if (NOT "${last_test}" STREQUAL "")
- set_tests_properties (hdftest_${example} PROPERTIES DEPENDS ${last_test} LABELS EXAMPLES)
+ set_tests_properties (HDF_EXAMPLES-${example} PROPERTIES DEPENDS ${last_test} LABELS EXAMPLES)
else (NOT "${last_test}" STREQUAL "")
- set_tests_properties (hdftest_${example} PROPERTIES LABELS EXAMPLES)
+ set_tests_properties (HDF_EXAMPLES-${example} PROPERTIES LABELS EXAMPLES)
endif (NOT "${last_test}" STREQUAL "")
- set (last_test "hdftest_${example}")
+ set (last_test "HDF_EXAMPLES-${example}")
endforeach (example ${examples})
-add_test (NAME hdftest_VG_add_sds_to_vgroup COMMAND $<TARGET_FILE:hdf_VG_add_sds_to_vgroup>)
+add_test (NAME HDF_EXAMPLES-VG_add_sds_to_vgroup COMMAND $<TARGET_FILE:hdf_VG_add_sds_to_vgroup>)
if (NOT "${last_test}" STREQUAL "")
- set_tests_properties (hdftest_VG_add_sds_to_vgroup PROPERTIES DEPENDS ${last_test} LABELS EXAMPLES)
+ set_tests_properties (HDF_EXAMPLES-VG_add_sds_to_vgroup PROPERTIES DEPENDS ${last_test} LABELS EXAMPLES)
else (NOT "${last_test}" STREQUAL "")
- set_tests_properties (hdftest_VG_add_sds_to_vgroup PROPERTIES LABELS EXAMPLES)
+ set_tests_properties (HDF_EXAMPLES-VG_add_sds_to_vgroup PROPERTIES LABELS EXAMPLES)
endif (NOT "${last_test}" STREQUAL "")
-set (last_test "hdftest_VG_add_sds_to_vgroup")
+set (last_test "HDF_EXAMPLES-VG_add_sds_to_vgroup")
diff --git a/hdf/examples/Makefile.am b/hdf/examples/Makefile.am
index 1222374..75b7a2c 100644
--- a/hdf/examples/Makefile.am
+++ b/hdf/examples/Makefile.am
@@ -46,8 +46,8 @@ INSTALL_TOP_SCRIPT_FILES = run-all-ex.sh
INSTALL_TOP_FILES = README
# Where to install example files
-EXAMPLEDIR=$(prefix)/examples/c
-EXAMPLETOPDIR=$(prefix)/examples
+EXAMPLEDIR=${DESTDIR}$(prefix)/examples/c
+EXAMPLETOPDIR=${DESTDIR}$(prefix)/examples
# How to build C programs using h4cc
$(EXTRA_PROG): $(H4CC)
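
Prefixing EXAMPLEDIR/EXAMPLETOPDIR with ${DESTDIR} lets staged installs place the example sources under the staging root instead of the real prefix. A sketch, assuming the default /usr/local prefix and a throwaway staging directory:

    make install DESTDIR=/tmp/hdf4-stage
    ls /tmp/hdf4-stage/usr/local/examples/c    # the C examples land here now
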
diff --git a/hdf/examples/Makefile.in b/hdf/examples/Makefile.in
index 095b650..b37d819 100644
--- a/hdf/examples/Makefile.in
+++ b/hdf/examples/Makefile.in
@@ -92,7 +92,20 @@ DIST_COMMON = $(top_srcdir)/config/commence.am \
TESTS = $(TEST_PROG)
subdir = hdf/examples
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
-am__aclocal_m4_deps = $(top_srcdir)/configure.ac
+am__aclocal_m4_deps = $(top_srcdir)/m4/ax_check_class.m4 \
+ $(top_srcdir)/m4/ax_check_classpath.m4 \
+ $(top_srcdir)/m4/ax_check_java_home.m4 \
+ $(top_srcdir)/m4/ax_check_junit.m4 \
+ $(top_srcdir)/m4/ax_java_options.m4 \
+ $(top_srcdir)/m4/ax_jni_include_dir.m4 \
+ $(top_srcdir)/m4/ax_prog_jar.m4 \
+ $(top_srcdir)/m4/ax_prog_java.m4 \
+ $(top_srcdir)/m4/ax_prog_java_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javac.m4 \
+ $(top_srcdir)/m4/ax_prog_javac_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javadoc.m4 \
+ $(top_srcdir)/m4/ax_try_compile_java.m4 \
+ $(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
mkinstalldirs = $(SHELL) $(top_srcdir)/bin/mkinstalldirs
@@ -364,12 +377,27 @@ FFLAGS = @FFLAGS@
FGREP = @FGREP@
FLIBS = @FLIBS@
GREP = @GREP@
+H4_CLASSPATH = @H4_CLASSPATH@
+H4_JAVACFLAGS = @H4_JAVACFLAGS@
+H4_JAVAFLAGS = @H4_JAVAFLAGS@
H4_VERSION = @H4_VERSION@
+HDF_JAVA = @HDF_JAVA@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
INSTALL_PROGRAM = @INSTALL_PROGRAM@
INSTALL_SCRIPT = @INSTALL_SCRIPT@
INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+JAR = @JAR@
+JAVA = @JAVA@
+JAVAC = @JAVAC@
+JAVACFLAGS = @JAVACFLAGS@
+JAVADOC = @JAVADOC@
+JAVAFLAGS = @JAVAFLAGS@
+JAVAPREFIX = @JAVAPREFIX@
+JAVA_JUNIT = @JAVA_JUNIT@
+JAVA_PATH_NAME = @JAVA_PATH_NAME@
+JNIFLAGS = @JNIFLAGS@
+JUNIT = @JUNIT@
LD = @LD@
LDFLAGS = @LDFLAGS@
LEX = @LEX@
@@ -412,11 +440,14 @@ STRIP = @STRIP@
SZIP_HAS_ENCODER = @SZIP_HAS_ENCODER@
SZIP_INFO = @SZIP_INFO@
TBL = @TBL@
+TESTS_JUNIT = @TESTS_JUNIT@
TEST_FORTRAN_NETCDF = @TEST_FORTRAN_NETCDF@
UNAME_INFO = @UNAME_INFO@
USE_COMP_SZIP = @USE_COMP_SZIP@
+UUDECODE = @UUDECODE@
VERSION = @VERSION@
YACC = @YACC@
+_ACJNI_JAVAC = @_ACJNI_JAVAC@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
@@ -530,8 +561,8 @@ INSTALL_TOP_SCRIPT_FILES = run-all-ex.sh
INSTALL_TOP_FILES = README
# Where to install example files
-EXAMPLEDIR = $(prefix)/examples/c
-EXAMPLETOPDIR = $(prefix)/examples
+EXAMPLEDIR = ${DESTDIR}$(prefix)/examples/c
+EXAMPLETOPDIR = ${DESTDIR}$(prefix)/examples
DISTCLEANFILES = *.chkexe *.chklog
@BUILD_SHARED_SZIP_CONDITIONAL_TRUE at LD_LIBRARY_PATH = $(LL_PATH)
diff --git a/hdf/fortran/CMakeLists.txt b/hdf/fortran/CMakeLists.txt
index a9e575a..3a876ec 100644
--- a/hdf/fortran/CMakeLists.txt
+++ b/hdf/fortran/CMakeLists.txt
@@ -1,21 +1,19 @@
-cmake_minimum_required (VERSION 2.8.10)
+cmake_minimum_required (VERSION 3.1)
PROJECT (HDF4_HDF_FORTRAN C CXX Fortran)
#-----------------------------------------------------------------------------
# Make sure generated files and modules are picked up correctly
#-----------------------------------------------------------------------------
-INCLUDE_DIRECTORIES (
- ${CMAKE_Fortran_MODULE_DIRECTORY}
+INCLUDE_DIRECTORIES (
${HDF4_HDF_BINARY_DIR}
)
-
+
set (HDF4_HDF_SRC_CSTUB_FSRCS
${HDF4_HDFSOURCE_DIR}/dfanf.c
${HDF4_HDFSOURCE_DIR}/dff.c
${HDF4_HDFSOURCE_DIR}/dfpf.c
${HDF4_HDFSOURCE_DIR}/dfr8f.c
${HDF4_HDFSOURCE_DIR}/dfsdf.c
- ${HDF4_HDFSOURCE_DIR}/dfufp2i.c
${HDF4_HDFSOURCE_DIR}/dfutilf.c
${HDF4_HDFSOURCE_DIR}/df24f.c
${HDF4_HDFSOURCE_DIR}/dfufp2if.c
@@ -26,58 +24,110 @@ set (HDF4_HDF_SRC_CSTUB_FSRCS
${HDF4_HDFSOURCE_DIR}/vattrf.c
${HDF4_HDFSOURCE_DIR}/vgf.c
)
+set_source_files_properties (${HDF4_HDF_SRC_CSTUB_FSRCS} PROPERTIES LANGUAGE C)
set (HDF4_HDF_SRC_FHDRS
${HDF4_HDFSOURCE_DIR}/dffunc.inc
${HDF4_HDFSOURCE_DIR}/hdf.inc
)
-SET_SOURCE_FILES_PROPERTIES (${HDF4_HDF_SRC_CSTUB_FSRCS} PROPERTIES LANGUAGE C)
-
set (FORTRAN_SRC_DIR ${CMAKE_CURRENT_SOURCE_DIR})
#-----------------------------------------------------------------------------
# Add Main fortran library
#-----------------------------------------------------------------------------
-add_library (${HDF4_SRC_FCSTUB_LIB_TARGET} ${LIB_TYPE} ${HDF4_HDF_SRC_CSTUB_FSRCS} ${HDF4_HDF_SRC_CHDRS} ${HDF4_HDFSOURCE_DIR}/hproto_fortran.h ${HDF4_BINARY_DIR}/h4config.h)
-set_target_properties (${HDF4_SRC_FCSTUB_LIB_TARGET} PROPERTIES LINKER_LANGUAGE C)
-if (WIN32)
- add_definitions (-DDOS_FS)
-endif (WIN32)
-TARGET_C_PROPERTIES (${HDF4_SRC_FCSTUB_LIB_TARGET} " " " ")
+add_library (${HDF4_SRC_FCSTUB_LIB_TARGET} STATIC ${HDF4_HDF_SRC_CSTUB_FSRCS} ${HDF4_HDF_SRC_CHDRS} ${HDF4_HDFSOURCE_DIR}/hproto_fortran.h ${HDF4_BINARY_DIR}/h4config.h)
+TARGET_C_PROPERTIES (${HDF4_SRC_FCSTUB_LIB_TARGET} STATIC " " " ")
target_link_libraries (${HDF4_SRC_FCSTUB_LIB_TARGET} ${HDF4_SRC_LIB_TARGET})
set_global_variable (HDF4_LIBRARIES_TO_EXPORT "${HDF4_LIBRARIES_TO_EXPORT};${HDF4_SRC_FCSTUB_LIB_TARGET}")
-H4_SET_LIB_OPTIONS (${HDF4_SRC_FCSTUB_LIB_TARGET} ${HDF4_SRC_FCSTUB_LIB_NAME} ${LIB_TYPE})
+H4_SET_LIB_OPTIONS (${HDF4_SRC_FCSTUB_LIB_TARGET} ${HDF4_SRC_FCSTUB_LIB_NAME} STATIC)
+set_target_properties (${HDF4_SRC_FCSTUB_LIB_TARGET} PROPERTIES
+ FOLDER libraries/fortran
+ LINKER_LANGUAGE C
+ INTERFACE_INCLUDE_DIRECTORIES "$<INSTALL_INTERFACE:$<INSTALL_PREFIX>/include>"
+)
+if (WIN32)
+ set_property (TARGET ${HDF4_SRC_FCSTUB_LIB_TARGET} APPEND PROPERTY COMPILE_DEFINITIONS "DOS_FS")
+endif (WIN32)
+set (install_targets ${HDF4_SRC_FCSTUB_LIB_TARGET})
-set (HDF4_F_FORTRAN_SRCS
- ${HDF4_HDFSOURCE_DIR}/df24ff.f
- ${HDF4_HDFSOURCE_DIR}/dfanff.f
- ${HDF4_HDFSOURCE_DIR}/dfpff.f
+if (BUILD_SHARED_LIBS)
+ add_library (${HDF4_SRC_FCSTUB_LIBSH_TARGET} SHARED ${HDF4_HDF_SRC_CSTUB_FSRCS} ${HDF4_HDF_SRC_CHDRS} ${HDF4_HDFSOURCE_DIR}/hproto_fortran.h ${HDF4_BINARY_DIR}/h4config.h)
+ TARGET_C_PROPERTIES (${HDF4_SRC_FCSTUB_LIBSH_TARGET} SHARED " " " ")
+ target_link_libraries (${HDF4_SRC_FCSTUB_LIBSH_TARGET} ${HDF4_SRC_LIBSH_TARGET})
+ set_global_variable (HDF4_LIBRARIES_TO_EXPORT "${HDF4_LIBRARIES_TO_EXPORT};${HDF4_SRC_FCSTUB_LIBSH_TARGET}")
+ H4_SET_LIB_OPTIONS (${HDF4_SRC_FCSTUB_LIBSH_TARGET} ${HDF4_SRC_FCSTUB_LIB_NAME} SHARED)
+ set_target_properties (${HDF4_SRC_FCSTUB_LIBSH_TARGET} PROPERTIES
+ FOLDER libraries/fortran
+ LINKER_LANGUAGE C
+ COMPILE_DEFINITIONS "H4_BUILT_AS_DYNAMIC_LIB"
+ INTERFACE_INCLUDE_DIRECTORIES "$<INSTALL_INTERFACE:$<INSTALL_PREFIX>/include>"
+ INTERFACE_COMPILE_DEFINITIONS H4_BUILT_AS_DYNAMIC_LIB=1
+ )
+ if (WIN32)
+ set_property (TARGET ${HDF4_SRC_FCSTUB_LIBSH_TARGET} APPEND PROPERTY COMPILE_DEFINITIONS "DOS_FS")
+ endif (WIN32)
+ set (install_targets ${install_targets} ${HDF4_SRC_FCSTUB_LIBSH_TARGET})
+endif (BUILD_SHARED_LIBS)
+
+set (HDF4_F_FORTRAN_SRCS
+ ${HDF4_HDFSOURCE_DIR}/df24ff.f
+ ${HDF4_HDFSOURCE_DIR}/dfanff.f
+ ${HDF4_HDFSOURCE_DIR}/dfpff.f
${HDF4_HDFSOURCE_DIR}/dfr8ff.f
- ${HDF4_HDFSOURCE_DIR}/dfsdff.f
+ ${HDF4_HDFSOURCE_DIR}/dfsdff.f
${HDF4_HDFSOURCE_DIR}/dfufp2iff.f
- ${HDF4_HDFSOURCE_DIR}/dfff.f
- ${HDF4_HDFSOURCE_DIR}/hfileff.f
+ ${HDF4_HDFSOURCE_DIR}/dfff.f
+ ${HDF4_HDFSOURCE_DIR}/hfileff.f
${HDF4_HDFSOURCE_DIR}/mfgrff.f
- ${HDF4_HDFSOURCE_DIR}/vattrff.f
- ${HDF4_HDFSOURCE_DIR}/vgff.f
+ ${HDF4_HDFSOURCE_DIR}/vattrff.f
+ ${HDF4_HDFSOURCE_DIR}/vgff.f
)
+set_source_files_properties (${HDF4_F_FORTRAN_SRCS} PROPERTIES LANGUAGE Fortran)
+
#-----------------------------------------------------------------------------
-add_library (${HDF4_SRC_FORTRAN_LIB_TARGET} ${LIB_TYPE} ${HDF4_F_FORTRAN_SRCS})
+add_library (${HDF4_SRC_FORTRAN_LIB_TARGET} STATIC ${HDF4_F_FORTRAN_SRCS})
set (SHARED_LINK_FLAGS " ")
-if (WIN32)
- if (${LIB_TYPE} MATCHES "SHARED")
- if (MSVC)
- set (SHARED_LINK_FLAGS "/DLL /DEF:${HDF4_HDF_FORTRAN_SOURCE_DIR}/hdf_fortrandll.def")
- endif (MSVC)
- endif (${LIB_TYPE} MATCHES "SHARED")
-endif (WIN32)
-TARGET_FORTRAN_PROPERTIES (${HDF4_SRC_FORTRAN_LIB_TARGET} " " ${SHARED_LINK_FLAGS})
-set_target_properties (${HDF4_SRC_FORTRAN_LIB_TARGET} PROPERTIES LINKER_LANGUAGE Fortran)
+TARGET_FORTRAN_PROPERTIES (${HDF4_SRC_FORTRAN_LIB_TARGET} STATIC " " " ")
target_link_libraries (${HDF4_SRC_FORTRAN_LIB_TARGET} ${HDF4_SRC_FCSTUB_LIB_TARGET} ${LINK_LIBS})
set_global_variable (HDF4_LIBRARIES_TO_EXPORT "${HDF4_LIBRARIES_TO_EXPORT};${HDF4_SRC_FORTRAN_LIB_TARGET}")
-H4_SET_LIB_OPTIONS (${HDF4_SRC_FORTRAN_LIB_TARGET} ${HDF4_SRC_FORTRAN_LIB_NAME} ${LIB_TYPE})
-
+H4_SET_LIB_OPTIONS (${HDF4_SRC_FORTRAN_LIB_TARGET} ${HDF4_SRC_FORTRAN_LIB_NAME} STATIC)
+set_target_properties (${HDF4_SRC_FORTRAN_LIB_TARGET} PROPERTIES
+ FOLDER libraries/fortran
+ LINKER_LANGUAGE Fortran
+ INTERFACE_INCLUDE_DIRECTORIES "$<INSTALL_INTERFACE:$<INSTALL_PREFIX>/include>"
+ Fortran_MODULE_DIRECTORY ${CMAKE_Fortran_MODULE_DIRECTORY}
+)
+if (WIN32)
+ set_property (TARGET ${HDF4_SRC_FORTRAN_LIB_TARGET} APPEND PROPERTY COMPILE_DEFINITIONS "DOS_FS")
+endif (WIN32)
+set (install_targets ${install_targets} ${HDF4_SRC_FORTRAN_LIB_TARGET})
+
+if (BUILD_SHARED_LIBS)
+ add_library (${HDF4_SRC_FORTRAN_LIBSH_TARGET} SHARED ${HDF4_F_FORTRAN_SRCS})
+ set (SHARED_LINK_FLAGS " ")
+ if (WIN32 AND MSVC)
+ set (SHARED_LINK_FLAGS "/DLL /DEF:${HDF4_HDF_FORTRAN_SOURCE_DIR}/hdf_fortrandll.def")
+ endif (WIN32 AND MSVC)
+ TARGET_FORTRAN_PROPERTIES (${HDF4_SRC_FORTRAN_LIBSH_TARGET} SHARED " " ${SHARED_LINK_FLAGS})
+ target_link_libraries (${HDF4_SRC_FORTRAN_LIBSH_TARGET} ${HDF4_SRC_FCSTUB_LIBSH_TARGET} ${LINK_SHARED_LIBS})
+ set_global_variable (HDF4_LIBRARIES_TO_EXPORT "${HDF4_LIBRARIES_TO_EXPORT};${HDF4_SRC_FORTRAN_LIBSH_TARGET}")
+ H4_SET_LIB_OPTIONS (${HDF4_SRC_FORTRAN_LIBSH_TARGET} ${HDF4_SRC_FORTRAN_LIB_NAME} SHARED)
+ set_target_properties (${HDF4_SRC_FORTRAN_LIBSH_TARGET} PROPERTIES
+ FOLDER libraries/fortran
+ LINKER_LANGUAGE Fortran
+ COMPILE_DEFINITIONS "H4_BUILT_AS_DYNAMIC_LIB"
+ INTERFACE_INCLUDE_DIRECTORIES "$<INSTALL_INTERFACE:$<INSTALL_PREFIX>/include>"
+ INTERFACE_COMPILE_DEFINITIONS H4_BUILT_AS_DYNAMIC_LIB=1
+ Fortran_MODULE_DIRECTORY ${CMAKE_Fortran_MODULE_DIRECTORY}
+ )
+ if (WIN32)
+ set_property (TARGET ${HDF4_SRC_FORTRAN_LIBSH_TARGET} APPEND PROPERTY COMPILE_DEFINITIONS "DOS_FS")
+ endif (WIN32)
+ set (install_targets ${install_targets} ${HDF4_SRC_FORTRAN_LIBSH_TARGET})
+endif (BUILD_SHARED_LIBS)
+
+
if (BUILD_TESTING)
include (CMakeTests.cmake)
endif (BUILD_TESTING)
@@ -105,19 +155,18 @@ INSTALL (
#-----------------------------------------------------------------------------
if (HDF4_EXPORTED_TARGETS)
if (BUILD_SHARED_LIBS)
- INSTALL_TARGET_PDB (${HDF4_SRC_FCSTUB_LIB_TARGET} ${HDF4_INSTALL_LIB_DIR} fortlibraries)
- INSTALL_TARGET_PDB (${HDF4_SRC_FORTRAN_LIB_TARGET} ${HDF4_INSTALL_LIB_DIR} fortlibraries)
+ INSTALL_TARGET_PDB (${HDF4_SRC_FCSTUB_LIBSH_TARGET} ${HDF4_INSTALL_BIN_DIR} fortlibraries)
+ #INSTALL_TARGET_PDB (${HDF4_SRC_FORTRAN_LIBSH_TARGET} ${HDF4_INSTALL_BIN_DIR} fortlibraries)
endif (BUILD_SHARED_LIBS)
-
+
INSTALL (
- TARGETS
- ${HDF4_SRC_FCSTUB_LIB_TARGET}
- ${HDF4_SRC_FORTRAN_LIB_TARGET}
+ TARGETS
+ ${install_targets}
EXPORT
${HDF4_EXPORTED_TARGETS}
- LIBRARY DESTINATION ${HDF4_INSTALL_LIB_DIR} COMPONENT fortlibraries
+ LIBRARY DESTINATION ${HDF4_INSTALL_LIB_DIR} COMPONENT fortlibraries
ARCHIVE DESTINATION ${HDF4_INSTALL_LIB_DIR} COMPONENT fortlibraries
RUNTIME DESTINATION ${HDF4_INSTALL_BIN_DIR} COMPONENT fortlibraries
+ FRAMEWORK DESTINATION ${HDF4_INSTALL_FWRK_DIR} COMPONENT fortlibraries
)
endif (HDF4_EXPORTED_TARGETS)
-
\ No newline at end of file
diff --git a/hdf/fortran/CMakeTests.cmake b/hdf/fortran/CMakeTests.cmake
index 8bd6b7c..123835c 100644
--- a/hdf/fortran/CMakeTests.cmake
+++ b/hdf/fortran/CMakeTests.cmake
@@ -10,19 +10,19 @@
#-----------------------------------------------------------------------------
# Add test fortran stub library
#-----------------------------------------------------------------------------
- add_library (${HDF4_HDF_TEST_FCSTUB_LIB_TARGET} ${LIB_TYPE} ${HDF4_HDF_TESTSOURCE_DIR}/forsupf.c)
+ add_library (${HDF4_HDF_TEST_FCSTUB_LIB_TARGET} STATIC ${HDF4_HDF_TESTSOURCE_DIR}/forsupf.c)
set_target_properties (${HDF4_HDF_TEST_FCSTUB_LIB_TARGET} PROPERTIES LINKER_LANGUAGE C)
if (WIN32)
add_definitions (-DDOS_FS)
endif (WIN32)
- TARGET_C_PROPERTIES (${HDF4_HDF_TEST_FCSTUB_LIB_TARGET} " " " ")
+ TARGET_C_PROPERTIES (${HDF4_HDF_TEST_FCSTUB_LIB_TARGET} STATIC " " " ")
target_link_libraries (${HDF4_HDF_TEST_FCSTUB_LIB_TARGET} ${HDF4_SRC_LIB_TARGET})
- H4_SET_LIB_OPTIONS (${HDF4_HDF_TEST_FCSTUB_LIB_TARGET} ${HDF4_HDF_TEST_FCSTUB_LIB_NAME} ${LIB_TYPE})
+ H4_SET_LIB_OPTIONS (${HDF4_HDF_TEST_FCSTUB_LIB_TARGET} ${HDF4_HDF_TEST_FCSTUB_LIB_NAME} STATIC)
#-- Adding test for fortest
- ADD_EXECUTABLE (fortest ${HDF4_HDF_TESTSOURCE_DIR}/fortest.c)
- TARGET_NAMING (fortest ${LIB_TYPE})
- TARGET_C_PROPERTIES (fortest " " " ")
+ add_executable (fortest ${HDF4_HDF_TESTSOURCE_DIR}/fortest.c)
+ TARGET_NAMING (fortest STATIC)
+ TARGET_C_PROPERTIES (fortest STATIC " " " ")
target_link_libraries (fortest ${HDF4_SRC_LIB_TARGET} ${HDF4_MF_LIB_TARGET})
if (WIN32)
target_link_libraries (fortest "ws2_32.lib")
@@ -52,9 +52,9 @@
${HDF4_HDF_TESTSOURCE_DIR}/tvsetf.f
)
- ADD_EXECUTABLE (fortestF ${FORTEST_FSRCS} )
- TARGET_NAMING (fortestF ${LIB_TYPE})
- TARGET_FORTRAN_PROPERTIES (fortestF " " " ")
+ add_executable (fortestF ${FORTEST_FSRCS} )
+ TARGET_NAMING (fortestF STATIC)
+ TARGET_FORTRAN_PROPERTIES (fortestF STATIC " " " ")
target_link_libraries (fortestF ${HDF4_SRC_FORTRAN_LIB_TARGET} ${HDF4_SRC_FCSTUB_LIB_TARGET} ${HDF4_HDF_TEST_FCSTUB_LIB_TARGET} ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS} )
set_target_properties (fortestF PROPERTIES LINKER_LANGUAGE Fortran)
diff --git a/hdf/fortran/Makefile.in b/hdf/fortran/Makefile.in
index 6a57fb7..996ddf1 100644
--- a/hdf/fortran/Makefile.in
+++ b/hdf/fortran/Makefile.in
@@ -82,7 +82,20 @@ DIST_COMMON = $(top_srcdir)/config/commence.am $(srcdir)/Makefile.in \
subdir = hdf/fortran
SUBDIRS =
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
-am__aclocal_m4_deps = $(top_srcdir)/configure.ac
+am__aclocal_m4_deps = $(top_srcdir)/m4/ax_check_class.m4 \
+ $(top_srcdir)/m4/ax_check_classpath.m4 \
+ $(top_srcdir)/m4/ax_check_java_home.m4 \
+ $(top_srcdir)/m4/ax_check_junit.m4 \
+ $(top_srcdir)/m4/ax_java_options.m4 \
+ $(top_srcdir)/m4/ax_jni_include_dir.m4 \
+ $(top_srcdir)/m4/ax_prog_jar.m4 \
+ $(top_srcdir)/m4/ax_prog_java.m4 \
+ $(top_srcdir)/m4/ax_prog_java_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javac.m4 \
+ $(top_srcdir)/m4/ax_prog_javac_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javadoc.m4 \
+ $(top_srcdir)/m4/ax_try_compile_java.m4 \
+ $(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
mkinstalldirs = $(SHELL) $(top_srcdir)/bin/mkinstalldirs
@@ -211,12 +224,27 @@ FFLAGS = @FFLAGS@
FGREP = @FGREP@
FLIBS = @FLIBS@
GREP = @GREP@
+H4_CLASSPATH = @H4_CLASSPATH@
+H4_JAVACFLAGS = @H4_JAVACFLAGS@
+H4_JAVAFLAGS = @H4_JAVAFLAGS@
H4_VERSION = @H4_VERSION@
+HDF_JAVA = @HDF_JAVA@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
INSTALL_PROGRAM = @INSTALL_PROGRAM@
INSTALL_SCRIPT = @INSTALL_SCRIPT@
INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+JAR = @JAR@
+JAVA = @JAVA@
+JAVAC = @JAVAC@
+JAVACFLAGS = @JAVACFLAGS@
+JAVADOC = @JAVADOC@
+JAVAFLAGS = @JAVAFLAGS@
+JAVAPREFIX = @JAVAPREFIX@
+JAVA_JUNIT = @JAVA_JUNIT@
+JAVA_PATH_NAME = @JAVA_PATH_NAME@
+JNIFLAGS = @JNIFLAGS@
+JUNIT = @JUNIT@
LD = @LD@
LDFLAGS = @LDFLAGS@
LEX = @LEX@
@@ -259,11 +287,14 @@ STRIP = @STRIP@
SZIP_HAS_ENCODER = @SZIP_HAS_ENCODER@
SZIP_INFO = @SZIP_INFO@
TBL = @TBL@
+TESTS_JUNIT = @TESTS_JUNIT@
TEST_FORTRAN_NETCDF = @TEST_FORTRAN_NETCDF@
UNAME_INFO = @UNAME_INFO@
USE_COMP_SZIP = @USE_COMP_SZIP@
+UUDECODE = @UUDECODE@
VERSION = @VERSION@
YACC = @YACC@
+_ACJNI_JAVAC = @_ACJNI_JAVAC@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/hdf/fortran/examples/CMakeLists.txt b/hdf/fortran/examples/CMakeLists.txt
index e0436ea..7148fea 100644
--- a/hdf/fortran/examples/CMakeLists.txt
+++ b/hdf/fortran/examples/CMakeLists.txt
@@ -1,4 +1,4 @@
-cmake_minimum_required (VERSION 2.8.10)
+cmake_minimum_required (VERSION 3.1.0)
PROJECT (HDF4_HDF_FORTRAN_EXAMPLES C CXX Fortran)
# --------------------------------------------------------------------
# Notes: When creating examples they should be prefixed
@@ -19,12 +19,10 @@ endif (CMAKE_Fortran_COMPILER MATCHES ifort)
# Setup include Directories
#-----------------------------------------------------------------------------
INCLUDE_DIRECTORIES (
- ${CMAKE_Fortran_MODULE_DIRECTORY}
${HDF4_HDF_BINARY_DIR}
${HDF4_HDFSOURCE_DIR}
)
LINK_DIRECTORIES (
- ${CMAKE_Fortran_MODULE_DIRECTORY}
${HDF4_HDF_BINARY_DIR}
${HDF4_MFHDF_BINARY_DIR}
${CMAKE_LIBRARY_OUTPUT_DIRECTORY}
@@ -68,24 +66,36 @@ set (skip_examples
)
foreach (example ${examples})
- ADD_EXECUTABLE (f_ex_${example} ${HDF4_HDF_FORTRAN_EXAMPLES_SOURCE_DIR}/${example}.f)
- TARGET_NAMING (f_ex_${example} ${LIB_TYPE})
- TARGET_FORTRAN_PROPERTIES (f_ex_${example} " " " ")
- set_target_properties (f_ex_${example} PROPERTIES LINKER_LANGUAGE Fortran)
+ add_executable (f_ex_${example} ${HDF4_HDF_FORTRAN_EXAMPLES_SOURCE_DIR}/${example}.f)
+ TARGET_NAMING (f_ex_${example} STATIC)
+ TARGET_FORTRAN_PROPERTIES (f_ex_${example} STATIC " " " ")
+ target_include_directories (f_ex_${example} PRIVATE ${CMAKE_Fortran_MODULE_DIRECTORY})
+ set_target_properties (f_ex_${example} PROPERTIES
+ LINKER_LANGUAGE Fortran
+ Fortran_MODULE_DIRECTORY ${CMAKE_Fortran_MODULE_DIRECTORY}
+ )
target_link_libraries (f_ex_${example} ${HDF4_SRC_FORTRAN_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
endforeach (example ${examples})
-ADD_EXECUTABLE (f_ex_VG_add_sds_to_vgroup ${HDF4_HDF_FORTRAN_EXAMPLES_SOURCE_DIR}/VG_add_sds_to_vgroup.f)
-TARGET_NAMING (f_ex_VG_add_sds_to_vgroup ${LIB_TYPE})
-TARGET_FORTRAN_PROPERTIES (f_ex_VG_add_sds_to_vgroup " " " ")
-set_target_properties (f_ex_VG_add_sds_to_vgroup PROPERTIES LINKER_LANGUAGE Fortran)
+add_executable (f_ex_VG_add_sds_to_vgroup ${HDF4_HDF_FORTRAN_EXAMPLES_SOURCE_DIR}/VG_add_sds_to_vgroup.f)
+TARGET_NAMING (f_ex_VG_add_sds_to_vgroup STATIC)
+TARGET_FORTRAN_PROPERTIES (f_ex_VG_add_sds_to_vgroup STATIC " " " ")
+target_include_directories (f_ex_VG_add_sds_to_vgroup PRIVATE ${CMAKE_Fortran_MODULE_DIRECTORY})
+set_target_properties (f_ex_VG_add_sds_to_vgroup PROPERTIES
+ LINKER_LANGUAGE Fortran
+ Fortran_MODULE_DIRECTORY ${CMAKE_Fortran_MODULE_DIRECTORY}
+)
target_link_libraries (f_ex_VG_add_sds_to_vgroup ${HDF4_MF_FORTRAN_LIB_TARGET} ${HDF4_SRC_FORTRAN_LIB_TARGET} ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
foreach (example ${skip_examples})
- ADD_EXECUTABLE (f_ex_${example} ${HDF4_HDF_FORTRAN_EXAMPLES_SOURCE_DIR}/${example}.f)
- TARGET_NAMING (f_ex_${example} ${LIB_TYPE})
- TARGET_FORTRAN_PROPERTIES (f_ex_${example} " " " ")
- set_target_properties (f_ex_${example} PROPERTIES LINKER_LANGUAGE Fortran)
+ add_executable (f_ex_${example} ${HDF4_HDF_FORTRAN_EXAMPLES_SOURCE_DIR}/${example}.f)
+ TARGET_NAMING (f_ex_${example} STATIC)
+ TARGET_FORTRAN_PROPERTIES (f_ex_${example} STATIC " " " ")
+ target_include_directories (f_ex_${example} PRIVATE ${CMAKE_Fortran_MODULE_DIRECTORY})
+ set_target_properties (f_ex_${example} PROPERTIES
+ LINKER_LANGUAGE Fortran
+ Fortran_MODULE_DIRECTORY ${CMAKE_Fortran_MODULE_DIRECTORY}
+ )
target_link_libraries (f_ex_${example} ${HDF4_SRC_FORTRAN_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
endforeach (example ${skip_examples})
diff --git a/hdf/fortran/examples/Makefile.am b/hdf/fortran/examples/Makefile.am
index aa48f92..b9a969f 100644
--- a/hdf/fortran/examples/Makefile.am
+++ b/hdf/fortran/examples/Makefile.am
@@ -41,7 +41,7 @@ INSTALL_FILES= VD_read_from_vdata.f VD_read_mixed_vdata.f \
INSTALL_SCRIPT_FILES = run-fortran-ex.sh
# Where to install Fortran example files
-EXAMPLEDIR=$(prefix)/examples/fortran
+EXAMPLEDIR=${DESTDIR}$(prefix)/examples/fortran
# How to build Fortran programs using h4fc
$(EXTRA_PROG): $(H4FC)
diff --git a/hdf/fortran/examples/Makefile.in b/hdf/fortran/examples/Makefile.in
index e255fac..d8f0f92 100644
--- a/hdf/fortran/examples/Makefile.in
+++ b/hdf/fortran/examples/Makefile.in
@@ -92,7 +92,20 @@ DIST_COMMON = $(top_srcdir)/config/commence.am \
TESTS = $(TEST_PROG)
subdir = hdf/fortran/examples
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
-am__aclocal_m4_deps = $(top_srcdir)/configure.ac
+am__aclocal_m4_deps = $(top_srcdir)/m4/ax_check_class.m4 \
+ $(top_srcdir)/m4/ax_check_classpath.m4 \
+ $(top_srcdir)/m4/ax_check_java_home.m4 \
+ $(top_srcdir)/m4/ax_check_junit.m4 \
+ $(top_srcdir)/m4/ax_java_options.m4 \
+ $(top_srcdir)/m4/ax_jni_include_dir.m4 \
+ $(top_srcdir)/m4/ax_prog_jar.m4 \
+ $(top_srcdir)/m4/ax_prog_java.m4 \
+ $(top_srcdir)/m4/ax_prog_java_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javac.m4 \
+ $(top_srcdir)/m4/ax_prog_javac_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javadoc.m4 \
+ $(top_srcdir)/m4/ax_try_compile_java.m4 \
+ $(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
mkinstalldirs = $(SHELL) $(top_srcdir)/bin/mkinstalldirs
@@ -364,12 +377,27 @@ FFLAGS = @FFLAGS@
FGREP = @FGREP@
FLIBS = @FLIBS@
GREP = @GREP@
+H4_CLASSPATH = @H4_CLASSPATH@
+H4_JAVACFLAGS = @H4_JAVACFLAGS@
+H4_JAVAFLAGS = @H4_JAVAFLAGS@
H4_VERSION = @H4_VERSION@
+HDF_JAVA = @HDF_JAVA@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
INSTALL_PROGRAM = @INSTALL_PROGRAM@
INSTALL_SCRIPT = @INSTALL_SCRIPT@
INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+JAR = @JAR@
+JAVA = @JAVA@
+JAVAC = @JAVAC@
+JAVACFLAGS = @JAVACFLAGS@
+JAVADOC = @JAVADOC@
+JAVAFLAGS = @JAVAFLAGS@
+JAVAPREFIX = @JAVAPREFIX@
+JAVA_JUNIT = @JAVA_JUNIT@
+JAVA_PATH_NAME = @JAVA_PATH_NAME@
+JNIFLAGS = @JNIFLAGS@
+JUNIT = @JUNIT@
LD = @LD@
LDFLAGS = @LDFLAGS@
LEX = @LEX@
@@ -412,11 +440,14 @@ STRIP = @STRIP@
SZIP_HAS_ENCODER = @SZIP_HAS_ENCODER@
SZIP_INFO = @SZIP_INFO@
TBL = @TBL@
+TESTS_JUNIT = @TESTS_JUNIT@
TEST_FORTRAN_NETCDF = @TEST_FORTRAN_NETCDF@
UNAME_INFO = @UNAME_INFO@
USE_COMP_SZIP = @USE_COMP_SZIP@
+UUDECODE = @UUDECODE@
VERSION = @VERSION@
YACC = @YACC@
+_ACJNI_JAVAC = @_ACJNI_JAVAC@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
@@ -525,7 +556,7 @@ INSTALL_FILES = VD_read_from_vdata.f VD_read_mixed_vdata.f \
INSTALL_SCRIPT_FILES = run-fortran-ex.sh
# Where to install Fortran example files
-EXAMPLEDIR = $(prefix)/examples/fortran
+EXAMPLEDIR = ${DESTDIR}$(prefix)/examples/fortran
DISTCLEANFILES = *.chkexe *.chklog
@BUILD_SHARED_SZIP_CONDITIONAL_TRUE at LD_LIBRARY_PATH = $(LL_PATH)
diff --git a/hdf/src/CMakeLists.txt b/hdf/src/CMakeLists.txt
index e684424..717b41d 100644
--- a/hdf/src/CMakeLists.txt
+++ b/hdf/src/CMakeLists.txt
@@ -1,4 +1,4 @@
-cmake_minimum_required (VERSION 2.8.10)
+cmake_minimum_required (VERSION 3.1)
PROJECT (HDF4_HDF_SRC C CXX)
#-----------------------------------------------------------------------------
@@ -6,7 +6,7 @@ PROJECT (HDF4_HDF_SRC C CXX)
#-----------------------------------------------------------------------------
INCLUDE_DIRECTORIES (
${HDF4_HDF_BINARY_DIR}
- ${HDF4_HDFSOURCE_DIR}
+ ${HDF4_HDF_SOURCE_DIR}
)
LINK_DIRECTORIES (
${HDF4_HDF_BINARY_DIR}
@@ -120,14 +120,37 @@ set (HDF4_HDF_SRC_CHDRS
${HDF4_HDF_SRC_SOURCE_DIR}/vgint.h
)
-add_library (${HDF4_SRC_LIB_TARGET} ${LIB_TYPE} ${HDF4_HDF_SRC_CSRCS} ${HDF4_HDF_SRC_CHDRS} ${HDF4_BINARY_DIR}/h4config.h)
-if (WIN32)
- add_definitions (-DDOS_FS)
-endif (WIN32)
-TARGET_C_PROPERTIES (${HDF4_SRC_LIB_TARGET} " " " ")
+add_library (${HDF4_SRC_LIB_TARGET} STATIC ${HDF4_HDF_SRC_CSRCS} ${HDF4_HDF_SRC_CHDRS} ${HDF4_BINARY_DIR}/h4config.h)
+TARGET_C_PROPERTIES (${HDF4_SRC_LIB_TARGET} STATIC " " " ")
target_link_libraries (${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
set_global_variable (HDF4_LIBRARIES_TO_EXPORT "${HDF4_LIBRARIES_TO_EXPORT};${HDF4_SRC_LIB_TARGET}")
-H4_SET_LIB_OPTIONS (${HDF4_SRC_LIB_TARGET} ${HDF4_SRC_LIB_NAME} ${LIB_TYPE})
+H4_SET_LIB_OPTIONS (${HDF4_SRC_LIB_TARGET} ${HDF4_SRC_LIB_NAME} STATIC)
+set_target_properties (${HDF4_SRC_LIB_TARGET} PROPERTIES
+ FOLDER libraries
+ INTERFACE_INCLUDE_DIRECTORIES "$<INSTALL_INTERFACE:$<INSTALL_PREFIX>/include>"
+)
+if (WIN32)
+ set_property (TARGET ${HDF4_SRC_LIB_TARGET} APPEND PROPERTY COMPILE_DEFINITIONS "DOS_FS")
+endif (WIN32)
+set (install_targets ${HDF4_SRC_LIB_TARGET})
+
+if (BUILD_SHARED_LIBS)
+ add_library (${HDF4_SRC_LIBSH_TARGET} SHARED ${HDF4_HDF_SRC_CSRCS} ${HDF4_HDF_SRC_CHDRS} ${HDF4_BINARY_DIR}/h4config.h)
+ TARGET_C_PROPERTIES (${HDF4_SRC_LIBSH_TARGET} SHARED " " " ")
+ target_link_libraries (${HDF4_SRC_LIBSH_TARGET} ${LINK_SHARED_LIBS})
+ set_global_variable (HDF4_LIBRARIES_TO_EXPORT "${HDF4_LIBRARIES_TO_EXPORT};${HDF4_SRC_LIBSH_TARGET}")
+ H4_SET_LIB_OPTIONS (${HDF4_SRC_LIBSH_TARGET} ${HDF4_SRC_LIB_NAME} SHARED)
+ set_target_properties (${HDF4_SRC_LIBSH_TARGET} PROPERTIES
+ FOLDER libraries
+ COMPILE_DEFINITIONS "H4_BUILT_AS_DYNAMIC_LIB"
+ INTERFACE_INCLUDE_DIRECTORIES "$<INSTALL_INTERFACE:$<INSTALL_PREFIX>/include>"
+ INTERFACE_COMPILE_DEFINITIONS H4_BUILT_AS_DYNAMIC_LIB=1
+ )
+ if (WIN32)
+ set_property (TARGET ${HDF4_SRC_LIBSH_TARGET} APPEND PROPERTY COMPILE_DEFINITIONS "DOS_FS")
+ endif (WIN32)
+ set (install_targets ${install_targets} ${HDF4_SRC_LIBSH_TARGET})
+endif (BUILD_SHARED_LIBS)
#-----------------------------------------------------------------------------
# Add file(s) to CMake Install
@@ -146,17 +169,18 @@ INSTALL (
#-----------------------------------------------------------------------------
if (HDF4_EXPORTED_TARGETS)
if (BUILD_SHARED_LIBS)
- INSTALL_TARGET_PDB (${HDF4_SRC_LIB_TARGET} ${HDF4_INSTALL_LIB_DIR} libraries)
+ INSTALL_TARGET_PDB (${HDF4_SRC_LIBSH_TARGET} ${HDF4_INSTALL_BIN_DIR} libraries)
endif (BUILD_SHARED_LIBS)
-
+
INSTALL (
- TARGETS
- ${HDF4_SRC_LIB_TARGET}
+ TARGETS
+ ${install_targets}
EXPORT
${HDF4_EXPORTED_TARGETS}
- LIBRARY DESTINATION ${HDF4_INSTALL_LIB_DIR} COMPONENT libraries
+ LIBRARY DESTINATION ${HDF4_INSTALL_LIB_DIR} COMPONENT libraries
ARCHIVE DESTINATION ${HDF4_INSTALL_LIB_DIR} COMPONENT libraries
RUNTIME DESTINATION ${HDF4_INSTALL_BIN_DIR} COMPONENT libraries
+ FRAMEWORK DESTINATION ${HDF4_INSTALL_FWRK_DIR} COMPONENT libraries
)
endif (HDF4_EXPORTED_TARGETS)
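
Throughout the CMake files the old single ${LIB_TYPE} switch is replaced by an always-built static target plus optional *_LIBSH shared targets guarded by BUILD_SHARED_LIBS. An illustrative out-of-source build that exercises both (the build directory name is an assumption):

    mkdir build && cd build
    cmake -DBUILD_SHARED_LIBS=ON ..
    cmake --build .
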
diff --git a/hdf/src/H4api_adpt.h b/hdf/src/H4api_adpt.h
index 4ce355a..c80f9c6 100644
--- a/hdf/src/H4api_adpt.h
+++ b/hdf/src/H4api_adpt.h
@@ -28,15 +28,15 @@
/* This will only be defined if HDF4 was built with CMake */
#if defined(H4_BUILT_AS_DYNAMIC_LIB)
-#if defined(xdr_EXPORTS)
+#if defined(xdr_shared_EXPORTS)
#if defined (_MSC_VER) || defined(__MINGW32__) /* MSVC Compiler Case */
#define XDRLIBAPI extern __declspec(dllexport)
#elif (__GNUC__ >= 4) /* GCC 4.x has support for visibility options */
#define XDRLIBAPI extern __attribute__ ((visibility("default")))
#endif
-#endif /* xdr_EXPORTS */
+#endif /* xdr_shared_EXPORTS */
-#if defined(hdf_EXPORTS)
+#if defined(hdf_shared_EXPORTS)
#if defined (_MSC_VER) || defined(__MINGW32__) /* MSVC Compiler Case */
#define HDFERRPUBLIC __declspec(dllimport)
#define HDFPUBLIC __declspec(dllexport)
@@ -48,9 +48,9 @@
#define HDFLIBAPI extern __attribute__ ((visibility("default")))
#define HDFFCLIBAPI extern __attribute__ ((visibility("default")))
#endif
-#endif /* hdf_EXPORTS */
+#endif /* hdf_shared_EXPORTS */
-#if defined(hdf_fcstub_EXPORTS)
+#if defined(hdf_fcstub_shared_EXPORTS)
#if defined (_MSC_VER) || defined(__MINGW32__) /* MSVC Compiler Case */
#define HDFPUBLIC __declspec(dllexport)
#define HDFLIBAPI extern __declspec(dllimport)
@@ -60,9 +60,9 @@
#define HDFLIBAPI extern __attribute__ ((visibility("default")))
#define HDFFCLIBAPI extern __attribute__ ((visibility("default")))
#endif
-#endif /* hdf_fcstub_EXPORTS */
+#endif /* hdf_fcstub_shared_EXPORTS */
-#if defined(mfhdf_EXPORTS)
+#if defined(mfhdf_shared_EXPORTS)
#if defined (_MSC_VER) || defined(__MINGW32__) /* MSVC Compiler Case */
#define HDFERRPUBLIC extern __declspec(dllimport)
#define HDFPUBLIC __declspec(dllimport)
@@ -74,9 +74,9 @@
#define HDFLIBAPI extern __attribute__ ((visibility("default")))
#define HDFFCLIBAPI extern __attribute__ ((visibility("default")))
#endif
-#endif /* mfhdf_EXPORTS */
+#endif /* mfhdf_shared_EXPORTS */
-#if defined(mfhdf_fcstub_EXPORTS)
+#if defined(mfhdf_fcstub_shared_EXPORTS)
#if defined (_MSC_VER) || defined(__MINGW32__) /* MSVC Compiler Case */
#define HDFPUBLIC __declspec(dllimport)
#define HDFLIBAPI extern __declspec(dllimport)
@@ -86,17 +86,17 @@
#define HDFLIBAPI extern __attribute__ ((visibility("default")))
#define HDFFCLIBAPI extern __attribute__ ((visibility("default")))
#endif
-#endif /* mfhdf_fcstub_EXPORTS */
+#endif /* mfhdf_shared_fcstub_EXPORTS */
-#if defined(hdf_test_fcstub_EXPORTS)
+#if defined(hdf_test_fcstub_shared_EXPORTS)
#if defined (_MSC_VER) || defined(__MINGW32__) /* MSVC Compiler Case */
#define HDFFCLIBAPI extern __declspec(dllexport)
#elif (__GNUC__ >= 4) /* GCC 4.x has support for visibility options */
#define HDFFCLIBAPI extern __attribute__ ((visibility("default")))
#endif
-#endif/* hdf_test_fcstub_EXPORTS */
+#endif/* hdf_test_fcstub_shared_EXPORTS */
-#if defined(mfhdf_hdiff_EXPORTS)
+#if defined(mfhdf_hdiff_shared_EXPORTS)
#if defined (_MSC_VER) || defined(__MINGW32__) /* MSVC Compiler Case */
#define HDFPUBLIC __declspec(dllimport)
#define HDFLIBAPI extern __declspec(dllimport)
@@ -106,9 +106,9 @@
#define HDFLIBAPI extern __attribute__ ((visibility("default")))
#define HDFTOOLSAPI extern __attribute__ ((visibility("default")))
#endif
-#endif /* mfhdf_hdiff_EXPORTS */
+#endif /* mfhdf_hdiff_shared_EXPORTS */
-#if defined(mfhdf_hrepack_EXPORTS)
+#if defined(mfhdf_hrepack_shared_EXPORTS)
#if defined (_MSC_VER) || defined(__MINGW32__) /* MSVC Compiler Case */
#define HDFPUBLIC __declspec(dllimport)
#define HDFLIBAPI extern __declspec(dllimport)
@@ -118,7 +118,7 @@
#define HDFLIBAPI extern __attribute__ ((visibility("default")))
#define HDFTOOLSAPI extern __attribute__ ((visibility("default")))
#endif
-#endif /* mfhdf_hrepack_EXPORTS */
+#endif /* mfhdf_hrepack_shared_EXPORTS */
#if !defined(XDRLIBAPI)
#if defined (_MSC_VER) || defined(__MINGW32__) /* MSVC Compiler Case */
@@ -163,94 +163,13 @@
#endif
#endif
-#elif defined(H4_BUILT_AS_STATIC_LIB)
- #define XDRLIBAPI extern
- #define HDFERRPUBLIC extern
- #define HDFPUBLIC
- #define HDFLIBAPI extern
- #define HDFFCLIBAPI extern
- #define HDFTOOLSAPI extern
-
#else
-/* This is the original HDFGroup defined preprocessor code which should still work
- * with the VS projects that are maintained by "The HDF Group"
- * This will be removed after the next release.
- */
-
-#ifdef _WIN32
-/**
- * Under _WIN32 we have single threaded static libraries, or
- * mutli-threaded DLLs using the multithreaded runtime DLLs.
- **/
-# if defined(_MT) && defined(_DLL) &&!defined(_HDFDLL_)
-/* If the user really meant to use _HDFDLL_, but he forgot, just define it. */
-# define _HDFDLL_
-# endif
-
-# if !defined(_MT) && defined(_HDFDLL_)
-# error To use the HDF libraries from a single-threaded project, you must use static HDF libraries
-# error Undefine the macro "_HDFDLL_"
-# endif
-
-# if defined(xdr_EXPORTS)
-# define XDRLIBAPI extern __declspec(dllexport)
-# endif /* xdr_EXPORTS */
-
-# if defined(_HDFDLL_)
-# pragma warning( disable: 4273 ) /* Disable the stupid dll linkage warnings */
-# if defined(_HDFLIB_)
-# define HDFPUBLIC __declspec(dllexport)
-# define HDFLIBAPI extern __declspec(dllexport)
-# endif
-
-# if defined(_MFHDFLIB_)
-# define HDFLIBAPI extern __declspec(dllexport)
-# endif
-
-# if defined(_HDFLIB_C_STUB_EXPORTS) || defined(_MFHDFLIB_C_STUB_EXPORTS) || defined(_DLLLIBTEST_FCSTUB_EXPORTS)
-# define HDFFCLIBAPI extern __declspec(dllexport)
-# endif
-
-# if defined(_HDFLIB_C_STUB_EXPORTS)
-# define HDFPUBLIC __declspec(dllexport)
-# endif
-
-# if !defined(XDRLIBAPI)
-# define XDRLIBAPI extern __declspec(dllimport)
-# endif
-# if !defined(HDFERRPUBLIC)
- #define HDFERRPUBLIC extern __declspec(dllimport)
-# endif
-# if !defined(HDFPUBLIC)
-# define HDFPUBLIC __declspec(dllimport)
-# endif
-# if !defined(HDFLIBAPI)
-# define HDFLIBAPI extern __declspec(dllimport)
-# endif
-# if !defined(HDFFCLIBAPI)
-# define HDFFCLIBAPI extern __declspec(dllimport)
-# endif
-# if !defined(HDFTOOLSAPI)
-# define HDFTOOLSAPI extern __declspec(dllimport)
-# endif
-
-# else
-# define XDRLIBAPI extern
-# define HDFERRPUBLIC extern
-# define HDFPUBLIC
-# define HDFLIBAPI extern
-# define HDFFCLIBAPI extern
-# define HDFTOOLSAPI extern
-# endif
-#else /* !defined( _WIN32 ) */
# define XDRLIBAPI extern
# define HDFERRPUBLIC extern
# define HDFPUBLIC
# define HDFLIBAPI extern
# define HDFFCLIBAPI extern
# define HDFTOOLSAPI extern
-#endif
-
#endif /*H4_BUILT_AS_DYNAMIC_LIB */
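
With the *_shared_EXPORTS renaming and the removal of the legacy _WIN32 branch, client code linked against the CMake-built shared libraries is expected to compile with H4_BUILT_AS_DYNAMIC_LIB defined (the shared targets now export it via INTERFACE_COMPILE_DEFINITIONS); without it the plain `extern` defaults apply. A hedged compile sketch for a non-CMake command line, with illustrative install paths and the traditional -lmfhdf -ldf link order (library names may differ for the CMake-built shared objects):

    cc -DH4_BUILT_AS_DYNAMIC_LIB=1 -I/opt/hdf4/include -c myapp.c
    cc myapp.o -L/opt/hdf4/lib -lmfhdf -ldf -o myapp
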
diff --git a/hdf/src/Makefile.in b/hdf/src/Makefile.in
index 300567b..10e7dda 100644
--- a/hdf/src/Makefile.in
+++ b/hdf/src/Makefile.in
@@ -91,7 +91,20 @@ F77LINK = $(LIBTOOL) $(AM_V_lt) --tag=F77 $(AM_LIBTOOLFLAGS) \
$(AM_LDFLAGS) $(LDFLAGS) -o $@
subdir = hdf/src
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
-am__aclocal_m4_deps = $(top_srcdir)/configure.ac
+am__aclocal_m4_deps = $(top_srcdir)/m4/ax_check_class.m4 \
+ $(top_srcdir)/m4/ax_check_classpath.m4 \
+ $(top_srcdir)/m4/ax_check_java_home.m4 \
+ $(top_srcdir)/m4/ax_check_junit.m4 \
+ $(top_srcdir)/m4/ax_java_options.m4 \
+ $(top_srcdir)/m4/ax_jni_include_dir.m4 \
+ $(top_srcdir)/m4/ax_prog_jar.m4 \
+ $(top_srcdir)/m4/ax_prog_java.m4 \
+ $(top_srcdir)/m4/ax_prog_java_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javac.m4 \
+ $(top_srcdir)/m4/ax_prog_javac_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javadoc.m4 \
+ $(top_srcdir)/m4/ax_try_compile_java.m4 \
+ $(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
mkinstalldirs = $(SHELL) $(top_srcdir)/bin/mkinstalldirs
@@ -289,12 +302,27 @@ FFLAGS = @FFLAGS@
FGREP = @FGREP@
FLIBS = @FLIBS@
GREP = @GREP@
+H4_CLASSPATH = @H4_CLASSPATH@
+H4_JAVACFLAGS = @H4_JAVACFLAGS@
+H4_JAVAFLAGS = @H4_JAVAFLAGS@
H4_VERSION = @H4_VERSION@
+HDF_JAVA = @HDF_JAVA@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
INSTALL_PROGRAM = @INSTALL_PROGRAM@
INSTALL_SCRIPT = @INSTALL_SCRIPT@
INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+JAR = @JAR@
+JAVA = @JAVA@
+JAVAC = @JAVAC@
+JAVACFLAGS = @JAVACFLAGS@
+JAVADOC = @JAVADOC@
+JAVAFLAGS = @JAVAFLAGS@
+JAVAPREFIX = @JAVAPREFIX@
+JAVA_JUNIT = @JAVA_JUNIT@
+JAVA_PATH_NAME = @JAVA_PATH_NAME@
+JNIFLAGS = @JNIFLAGS@
+JUNIT = @JUNIT@
LD = @LD@
LDFLAGS = @LDFLAGS@
LEX = @LEX@
@@ -337,11 +365,14 @@ STRIP = @STRIP@
SZIP_HAS_ENCODER = @SZIP_HAS_ENCODER@
SZIP_INFO = @SZIP_INFO@
TBL = @TBL@
+TESTS_JUNIT = @TESTS_JUNIT@
TEST_FORTRAN_NETCDF = @TEST_FORTRAN_NETCDF@
UNAME_INFO = @UNAME_INFO@
USE_COMP_SZIP = @USE_COMP_SZIP@
+UUDECODE = @UUDECODE@
VERSION = @VERSION@
YACC = @YACC@
+_ACJNI_JAVAC = @_ACJNI_JAVAC@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/hdf/src/atom.c b/hdf/src/atom.c
index 135e1e2..72e17bc 100644
--- a/hdf/src/atom.c
+++ b/hdf/src/atom.c
@@ -11,12 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6140 $";
-#endif
-
-
-/* $Id: atom.c 6140 2014-10-10 02:24:32Z acheng $ */
+/* $Id: atom.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*
FILE
diff --git a/hdf/src/bitvect.c b/hdf/src/bitvect.c
index 83cbc40..5efba61 100644
--- a/hdf/src/bitvect.c
+++ b/hdf/src/bitvect.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "$Revision: 6111 $";
-#endif
-
-/* $Id: bitvect.c 6111 2014-06-02 20:52:09Z bmribler $ */
+/* $Id: bitvect.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*
FILE
diff --git a/hdf/src/cdeflate.c b/hdf/src/cdeflate.c
index d02b771..1a9648d 100644
--- a/hdf/src/cdeflate.c
+++ b/hdf/src/cdeflate.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "$Revision: 5489 $";
-#endif
-
-/* $Id: cdeflate.c 5489 2010-10-23 06:52:48Z bmribler $ */
+/* $Id: cdeflate.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*
FILE
diff --git a/hdf/src/cnbit.c b/hdf/src/cnbit.c
index e91e8f7..ccf3b2e 100644
--- a/hdf/src/cnbit.c
+++ b/hdf/src/cnbit.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: cnbit.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: cnbit.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*
FILE
diff --git a/hdf/src/cnone.c b/hdf/src/cnone.c
index cd8e91a..cb917b5 100644
--- a/hdf/src/cnone.c
+++ b/hdf/src/cnone.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: cnone.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: cnone.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*
FILE
diff --git a/hdf/src/crle.c b/hdf/src/crle.c
index 9284046..d24f89d 100644
--- a/hdf/src/crle.c
+++ b/hdf/src/crle.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 5705 $";
-#endif
-
-/* $Id: crle.c 5705 2011-10-26 12:45:21Z bmribler $ */
+/* $Id: crle.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*
FILE
diff --git a/hdf/src/cskphuff.c b/hdf/src/cskphuff.c
index 1e289ad..56897ef 100644
--- a/hdf/src/cskphuff.c
+++ b/hdf/src/cskphuff.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6032 $";
-#endif
-
-/* $Id: cskphuff.c 6032 2014-01-17 18:13:52Z acheng $ */
+/* $Id: cskphuff.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*
FILE
diff --git a/hdf/src/cszip.c b/hdf/src/cszip.c
index cf3e2b2..5e54da1 100644
--- a/hdf/src/cszip.c
+++ b/hdf/src/cszip.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6066 $";
-#endif
-
-/* $Id: cszip.c 6066 2014-02-03 16:30:04Z derobins $ */
+/* $Id: cszip.c 6357 2016-05-13 05:00:06Z bmribler $ */
/* General HDF includes */
diff --git a/hdf/src/df24.c b/hdf/src/df24.c
index a1567b3..4158fa2 100644
--- a/hdf/src/df24.c
+++ b/hdf/src/df24.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4924 $";
-#endif
-
-/* $Id: df24.c 4924 2007-09-05 21:55:40Z fbaker $ */
+/* $Id: df24.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*-----------------------------------------------------------------------------
* File: df24.c
diff --git a/hdf/src/df24f.c b/hdf/src/df24f.c
index a2b7cbb..8884b81 100644
--- a/hdf/src/df24f.c
+++ b/hdf/src/df24f.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: df24f.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: df24f.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*-----------------------------------------------------------------------------
* File: df24F.c
diff --git a/hdf/src/dfan.c b/hdf/src/dfan.c
index 5de6e82..df69616 100644
--- a/hdf/src/dfan.c
+++ b/hdf/src/dfan.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 5322 $";
-#endif
-
-/* $Id: dfan.c 5322 2010-01-19 06:26:11Z brtnfld $ */
+/* $Id: dfan.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*-----------------------------------------------------------------------------
* File: dfan.c
diff --git a/hdf/src/dfanf.c b/hdf/src/dfanf.c
index 635ac6f..a8b3b3a 100644
--- a/hdf/src/dfanf.c
+++ b/hdf/src/dfanf.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 5322 $";
-#endif
-
-/* $Id: dfanf.c 5322 2010-01-19 06:26:11Z brtnfld $ */
+/* $Id: dfanf.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*-----------------------------------------------------------------------------
* File: dfanF.c
diff --git a/hdf/src/dfcomp.c b/hdf/src/dfcomp.c
index 89c6785..34cc3da 100644
--- a/hdf/src/dfcomp.c
+++ b/hdf/src/dfcomp.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: dfcomp.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: dfcomp.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*-----------------------------------------------------------------------------
* File: dfcomp.c
diff --git a/hdf/src/dfconv.c b/hdf/src/dfconv.c
index 184d8c5..f6617f4 100644
--- a/hdf/src/dfconv.c
+++ b/hdf/src/dfconv.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6032 $";
-#endif
-
-/* $Id: dfconv.c 6032 2014-01-17 18:13:52Z acheng $ */
+/* $Id: dfconv.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*------------------------------------------------------------------
File: dfconv.c
diff --git a/hdf/src/dff.c b/hdf/src/dff.c
index f9813d7..3e1ca2e 100644
--- a/hdf/src/dff.c
+++ b/hdf/src/dff.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: dff.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: dff.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*-----------------------------------------------------------------------------
* File: dfF.c
diff --git a/hdf/src/dfgr.c b/hdf/src/dfgr.c
index 83c9f9a..ca85bf0 100644
--- a/hdf/src/dfgr.c
+++ b/hdf/src/dfgr.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: dfgr.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: dfgr.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*-----------------------------------------------------------------------------
* File: dfgr.c
diff --git a/hdf/src/dfgroup.c b/hdf/src/dfgroup.c
index 20cc232..e75bae6 100644
--- a/hdf/src/dfgroup.c
+++ b/hdf/src/dfgroup.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: dfgroup.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: dfgroup.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*-----------------------------------------------------------------------------
* File: dfgroup.c
diff --git a/hdf/src/dfimcomp.c b/hdf/src/dfimcomp.c
index 1e049e3..6b57f53 100644
--- a/hdf/src/dfimcomp.c
+++ b/hdf/src/dfimcomp.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6043 $";
-#endif
-
-/* $Id: dfimcomp.c 6043 2014-01-21 21:09:03Z acheng $ */
+/* $Id: dfimcomp.c 6357 2016-05-13 05:00:06Z bmribler $ */
/************************************************************************/
/* Module Name : imcomp */
diff --git a/hdf/src/dfjpeg.c b/hdf/src/dfjpeg.c
index 5bc8ce0..3b8e4f7 100644
--- a/hdf/src/dfjpeg.c
+++ b/hdf/src/dfjpeg.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4951 $";
-#endif
-
-/* $Id: dfjpeg.c 4951 2007-09-11 19:33:41Z epourmal $ */
+/* $Id: dfjpeg.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*-----------------------------------------------------------------------------
* File: dfjpeg.c
diff --git a/hdf/src/dfknat.c b/hdf/src/dfknat.c
index 4f8f54a..c1069f5 100644
--- a/hdf/src/dfknat.c
+++ b/hdf/src/dfknat.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6032 $";
-#endif
-
-/* $Id: dfknat.c 6032 2014-01-17 18:13:52Z acheng $ */
+/* $Id: dfknat.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*------------------------------------------------------------------
File: dfknat.c
diff --git a/hdf/src/dfkswap.c b/hdf/src/dfkswap.c
index 60c5d52..fc42de5 100644
--- a/hdf/src/dfkswap.c
+++ b/hdf/src/dfkswap.c
@@ -11,12 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: dfkswap.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: dfkswap.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*------------------------------------------------------------------
File: dfkswap.c
diff --git a/hdf/src/dfp.c b/hdf/src/dfp.c
index ef0a09d..48cbae5 100644
--- a/hdf/src/dfp.c
+++ b/hdf/src/dfp.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: dfp.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: dfp.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*-----------------------------------------------------------------------------
* File: dfp.c
diff --git a/hdf/src/dfpf.c b/hdf/src/dfpf.c
index 5c7f0aa..be63a08 100644
--- a/hdf/src/dfpf.c
+++ b/hdf/src/dfpf.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: dfpf.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: dfpf.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*-----------------------------------------------------------------------------
* File: dfpF.c
diff --git a/hdf/src/dfr8.c b/hdf/src/dfr8.c
index 17c03e6..8320e71 100644
--- a/hdf/src/dfr8.c
+++ b/hdf/src/dfr8.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: dfr8.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: dfr8.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*-----------------------------------------------------------------------------
* File: dfr8.c
diff --git a/hdf/src/dfr8f.c b/hdf/src/dfr8f.c
index aac7c82..522654b 100644
--- a/hdf/src/dfr8f.c
+++ b/hdf/src/dfr8f.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: dfr8f.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: dfr8f.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*-----------------------------------------------------------------------------
* File: dfr8F.c
diff --git a/hdf/src/dfrle.c b/hdf/src/dfrle.c
index 44e1181..cb49b24 100644
--- a/hdf/src/dfrle.c
+++ b/hdf/src/dfrle.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: dfrle.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: dfrle.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*-----------------------------------------------------------------------------
* File: dfrle.c
diff --git a/hdf/src/dfsd.c b/hdf/src/dfsd.c
index 5d27ff0..99ded28 100644
--- a/hdf/src/dfsd.c
+++ b/hdf/src/dfsd.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6032 $";
-#endif
-
-/* $Id: dfsd.c 6032 2014-01-17 18:13:52Z acheng $ */
+/* $Id: dfsd.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*-----------------------------------------------------------------------------
File: dfsd.c
diff --git a/hdf/src/dfsdf.c b/hdf/src/dfsdf.c
index f671942..cd60f35 100644
--- a/hdf/src/dfsdf.c
+++ b/hdf/src/dfsdf.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: dfsdf.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: dfsdf.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*-----------------------------------------------------------------------------
* File: dfsdf.c
diff --git a/hdf/src/dfstubs.c b/hdf/src/dfstubs.c
index c16b50c..ee3103b 100644
--- a/hdf/src/dfstubs.c
+++ b/hdf/src/dfstubs.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6036 $";
-#endif
-
-/* $Id: dfstubs.c 6036 2014-01-20 17:28:01Z acheng $ */
+/* $Id: dfstubs.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*
** FILE
diff --git a/hdf/src/dfufp2i.c b/hdf/src/dfufp2i.c
index d09d068..d4ba24c 100644
--- a/hdf/src/dfufp2i.c
+++ b/hdf/src/dfufp2i.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: dfufp2i.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: dfufp2i.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*-----------------------------------------------------------------------------c
* dfufp2i.c
diff --git a/hdf/src/dfufp2i.h b/hdf/src/dfufp2i.h
index 676c653..f3c5289 100644
--- a/hdf/src/dfufp2i.h
+++ b/hdf/src/dfufp2i.h
@@ -11,7 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-/* $Id: dfufp2i.h 5949 2013-06-10 16:16:09Z byrn $ */
+/* $Id: dfufp2i.h 6220 2015-03-05 20:06:46Z byrn $ */
#ifndef DFUFP2IM_H /* avoid re-inclusion */
#define DFUFP2IM_H
@@ -79,6 +79,10 @@ extern "C"
(float32 *scale, int32 dim, int32 *offsets, int32 res);
HDFLIBAPI int pixrep_simple
(Input * in, Output * out);
+ HDFLIBAPI int DFUfptoimage
+ (int32 hdim, int32 vdim, float32 max, float32 min,
+ float32 *hscale, float32 *vscale, float32 *data, uint8 *palette,
+ char *outfile, int ct_method, int32 hres, int32 vres, int compress);
#if defined c_plusplus || defined __cplusplus
}
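
The DFUfptoimage() prototype added to dfufp2i.h above can be exercised as in the
minimal sketch below. The data values, output file name, resolution, and the
conversion-method and compression arguments are illustrative assumptions, not taken
from the HDF sources.

    #include "hdf.h"
    #include "dfufp2i.h"

    /* Convert a tiny 2x3 float32 grid into an 8-bit raster image file. */
    int fp_to_image_demo(void)
    {
        float32 data[2][3] = { {0.0F, 0.5F, 1.0F}, {1.5F, 2.0F, 2.5F} };

        return DFUfptoimage(3, 2,               /* hdim (columns), vdim (rows) */
                            2.5F, 0.0F,         /* max, min of the data range */
                            NULL, NULL,         /* h/v scales; NULL assumed acceptable */
                            (float32 *)data,
                            NULL,               /* no palette */
                            "fp2image.hdf",     /* output file (illustrative name) */
                            1,                  /* ct_method; value is an assumption */
                            300, 200,           /* hres, vres */
                            0);                 /* compress: none */
    }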
diff --git a/hdf/src/dfunjpeg.c b/hdf/src/dfunjpeg.c
index f2bfe6a..a2a305f 100644
--- a/hdf/src/dfunjpeg.c
+++ b/hdf/src/dfunjpeg.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: dfunjpeg.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: dfunjpeg.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*-----------------------------------------------------------------------------
* File: dfunjpeg.c
diff --git a/hdf/src/dfutil.c b/hdf/src/dfutil.c
index a1540a4..bd3b51b 100644
--- a/hdf/src/dfutil.c
+++ b/hdf/src/dfutil.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: dfutil.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: dfutil.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*-----------------------------------------------------------------------------
* File: dfutil.c
diff --git a/hdf/src/dfutilf.c b/hdf/src/dfutilf.c
index ed136a7..6745d21 100644
--- a/hdf/src/dfutilf.c
+++ b/hdf/src/dfutilf.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: dfutilf.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: dfutilf.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*-----------------------------------------------------------------------------
* File: dfutilf.c
diff --git a/hdf/src/dynarray.c b/hdf/src/dynarray.c
index e8e02c2..ee508a0 100644
--- a/hdf/src/dynarray.c
+++ b/hdf/src/dynarray.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: dynarray.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: dynarray.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*
FILE
diff --git a/hdf/src/glist.c b/hdf/src/glist.c
index 520531a..cac7ad6 100644
--- a/hdf/src/glist.c
+++ b/hdf/src/glist.c
@@ -20,10 +20,6 @@
1996/06/04 - George V.
************************************************************************/
-#ifdef RCSID
-static char RcsId[] = "@(#)$Id: glist.c 6043 2014-01-21 21:09:03Z acheng $";
-#endif
-
#include <stdio.h>
#include <stdlib.h>
#include "glist.h"
diff --git a/hdf/src/hbitio.c b/hdf/src/hbitio.c
index e8aca1f..a32f5a8 100644
--- a/hdf/src/hbitio.c
+++ b/hdf/src/hbitio.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: hbitio.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: hbitio.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*
FILE
diff --git a/hdf/src/hblocks.c b/hdf/src/hblocks.c
index b0b04d5..987989f 100644
--- a/hdf/src/hblocks.c
+++ b/hdf/src/hblocks.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6026 $";
-#endif
-
-/* $Id: hblocks.c 6026 2014-01-16 15:16:16Z bmribler $ */
+/* $Id: hblocks.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*LINTLIBRARY */
/* ------------------------------ hblocks.c -------------------------------
diff --git a/hdf/src/hbuffer.c b/hdf/src/hbuffer.c
index 35aa62d..0270c8c 100644
--- a/hdf/src/hbuffer.c
+++ b/hdf/src/hbuffer.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: hbuffer.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: hbuffer.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*LINTLIBRARY */
/* ------------------------------ hbuffer.c -------------------------------
diff --git a/hdf/src/hchunks.c b/hdf/src/hchunks.c
index b1aac47..f2fa1ab 100644
--- a/hdf/src/hchunks.c
+++ b/hdf/src/hchunks.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 5830 $";
-#endif
-
-/* $Id: hchunks.c 5830 2012-07-19 09:08:48Z bmribler $ */
+/* $Id: hchunks.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*LINTLIBRARY */
/* ------------------------------ HMCxxx -------------------------------
diff --git a/hdf/src/hcomp.c b/hdf/src/hcomp.c
index 96748d3..e60ae8a 100644
--- a/hdf/src/hcomp.c
+++ b/hdf/src/hcomp.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 5887 $";
-#endif
-
-/* $Id: hcomp.c 5887 2012-10-08 04:57:47Z bmribler $ */
+/* $Id: hcomp.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*
FILE
@@ -1980,8 +1976,8 @@ HCPgetdatasize(int32 file_id,
HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
- /* unlimited dimension falls in here */
- else if (sp_tag == SPECIAL_LINKED)
+ /* unlimited dimension and external data fall in here */
+ else if (sp_tag == SPECIAL_LINKED || sp_tag == SPECIAL_EXT)
{
INT32DECODE(p, len); /* get total data length */
*orig_size = *comp_size = len; /* set data sizes */
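
With the change above, HCPgetdatasize() reports sizes for external (SPECIAL_EXT)
elements as well as linked-block ones. A minimal caller sketch, assuming the
HCPgetdatasize()/Hopen() signatures from hproto.h; the tag/ref identify whatever
element the caller is interested in:

    #include <stdio.h>
    #include "hdf.h"

    /* Print the stored and uncompressed sizes of one data element. */
    intn report_sizes(const char *fname, uint16 tag, uint16 ref)
    {
        int32 comp_size = 0, orig_size = 0;
        int32 file_id = Hopen(fname, DFACC_READ, 0);

        if (file_id == FAIL)
            return FAIL;
        if (HCPgetdatasize(file_id, tag, ref, &comp_size, &orig_size) == FAIL) {
            Hclose(file_id);
            return FAIL;
        }
        printf("stored: %ld bytes, uncompressed: %ld bytes\n",
               (long)comp_size, (long)orig_size);
        return Hclose(file_id);
    }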
diff --git a/hdf/src/hcompri.c b/hdf/src/hcompri.c
index 39b9016..d0d8a27 100644
--- a/hdf/src/hcompri.c
+++ b/hdf/src/hcompri.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: hcompri.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: hcompri.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*LINTLIBRARY */
/* ------------------------------ hcompri.c -------------------------------
diff --git a/hdf/src/hdfalloc.c b/hdf/src/hdfalloc.c
index 26fb9ec..4e6cb4d 100644
--- a/hdf/src/hdfalloc.c
+++ b/hdf/src/hdfalloc.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6043 $";
-#endif
-
-/* $Id: hdfalloc.c 6043 2014-01-21 21:09:03Z acheng $ */
+/* $Id: hdfalloc.c 6427 2016-06-13 21:56:09Z byrn $ */
#ifdef MALDEBUG
#define __MALDEBUG__
@@ -118,7 +114,7 @@ HDmemfill(void * dest, const void * src, uint32 item_size, uint32 num_items)
EXAMPLES
REVISION LOG
Sep 19, 11 - Changed last argument's type from int32 to intn. It didn't
- make sense to have to cast in most places. -BMR
+ make sense to have to cast in most places. -BMR
--------------------------------------------------------------------------*/
char *
HIstrncpy(char *dest, const char *source, intn len)
@@ -258,7 +254,6 @@ HDcalloc(uint32 n, uint32 size)
} /* end HDcalloc() */
#endif /* MALLOC_CHECK */
-#if defined IBM6000 || defined ANSISUN || defined IRIX
/*--------------------------------------------------------------------------
NAME
HDstrdup -- in-library replacement for non-ANSI strdup()
@@ -278,13 +273,18 @@ HDcalloc(uint32 n, uint32 size)
char *
HDstrdup(const char *s)
{
- char *ret;
+ char *ret;
+
+ /* Make sure original string is not NULL */
+ if (s == NULL)
+ return(NULL);
+ /* Allocate space */
ret = (char *) HDmalloc((uint32) HDstrlen(s) + 1);
if (ret == NULL)
return (NULL);
+
+ /* Copy the original string and return it */
HDstrcpy(ret, s);
return (ret);
} /* end HDstrdup() */
-
-#endif /* macinosh */
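
The rewritten HDstrdup() above is now built unconditionally and tolerates a NULL
input, so a caller needs only a single check; a small sketch:

    #include "hdf.h"

    /* Duplicate a name string; NULL input or allocation failure both yield NULL. */
    char *copy_name(const char *name)
    {
        char *dup = HDstrdup(name);
        if (dup == NULL)
            return NULL;   /* NULL input or out of memory */
        return dup;
    }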
diff --git a/hdf/src/hdfi.h b/hdf/src/hdfi.h
index e645965..bdab3e9 100644
--- a/hdf/src/hdfi.h
+++ b/hdf/src/hdfi.h
@@ -11,7 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-/* $Id: hdfi.h 6043 2014-01-21 21:09:03Z acheng $ */
+/* $Id: hdfi.h 6446 2016-06-15 16:54:36Z bmribler $ */
#ifndef HDFI_H
#define HDFI_H
@@ -1273,10 +1273,6 @@ correctly.
# define HDstrchr(s,c) (strchr((s),(c)))
# define HDstrrchr(s,c) (strrchr((s),(c)))
# define HDstrtol(s,e,b) (strtol((s),(e),(b)))
-/* non-standard function, not defined on the following machines - */
-#if !(defined IBM6000 || defined ANSISUN )
-# define HDstrdup(s) ((char *)strdup((const char *)(s)))
-#endif /* !(etc..) */
/**************************************************************************
@@ -1302,7 +1298,9 @@ correctly.
#endif /* !SUN & GCC */
/* Compatibility #define for V3.3, should be taken out by v4.0 - QAK */
-#define DFSDnumber DFSDndatasets
+/* Commented out only, just in case any legacy code is still using it out there.
+ Will be removed in a few maintenance releases. -BMR, Jun 5, 2016
+#define DFSDnumber DFSDndatasets */
#endif /* HDFI_H */
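
Since the DFSDnumber compatibility macro above is now commented out, any remaining
legacy code should call DFSDndatasets() directly; a minimal sketch with an
illustrative file name:

    #include "hdf.h"

    /* Count the scientific data sets in an HDF file. */
    int count_datasets(void)
    {
        /* formerly: DFSDnumber("olddata.hdf"); the macro is no longer defined */
        intn ndsets = DFSDndatasets("olddata.hdf");
        return (ndsets == FAIL) ? FAIL : (int)ndsets;
    }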
diff --git a/hdf/src/herr.c b/hdf/src/herr.c
index 83aa918..99f7f29 100644
--- a/hdf/src/herr.c
+++ b/hdf/src/herr.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: herr.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: herr.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*LINTLIBRARY */
/* -------------------------------- herr.c -------------------------------- */
diff --git a/hdf/src/herr.h b/hdf/src/herr.h
index 52d7dae..1df813e 100644
--- a/hdf/src/herr.h
+++ b/hdf/src/herr.h
@@ -11,7 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-/* $Id: herr.h 5759 2012-01-19 22:34:47Z byrn $ */
+/* $Id: herr.h 6443 2016-06-15 02:10:31Z bmribler $ */
/*+ herr.h
*** header file for using error routines
@@ -92,34 +92,26 @@
/* always points to the next available slot; the last error record is in slot (top-1) */
#if defined(H4_BUILT_AS_DYNAMIC_LIB)
-#ifdef _H_ERR_MASTER_
-#if defined _WIN32 && defined hdf_EXPORTS
+# ifdef _H_ERR_MASTER_
+# if defined _WIN32 && defined hdf_shared_EXPORTS
__declspec(dllexport)
-#endif
-#else
+# endif
+# else
HDFERRPUBLIC
-#endif /* _H_ERR_MASTER_ */
+# endif /* _H_ERR_MASTER_ */
int32 error_top
-#ifdef _H_ERR_MASTER_
+# ifdef _H_ERR_MASTER_
= 0
-#endif /* _H_ERR_MASTER_ */
+# endif /* _H_ERR_MASTER_ */
;
#else /* defined(H4_BUILT_AS_DYNAMIC_LIB) */
-#ifndef _H_ERR_MASTER_
-#if defined _WIN32 && defined HDFAPDLL
-__declspec(dllimport)
-#else
+# ifndef _H_ERR_MASTER_
HDFERRPUBLIC
-#endif
-#else
-#if defined _WIN32 && defined HDFLIBDLL
-__declspec(dllexport)
-#endif
-#endif /* _H_ERR_MASTER_ */
+# endif /* _H_ERR_MASTER_ */
int32 error_top
-#ifdef _H_ERR_MASTER_
+# ifdef _H_ERR_MASTER_
= 0
-#endif /* _H_ERR_MASTER_ */
+# endif /* _H_ERR_MASTER_ */
;
#endif /* defined(H4_BUILT_AS_DYNAMIC_LIB) */
@@ -153,6 +145,7 @@ typedef enum
DFE_SEEKERROR, /* There was a seek error */
DFE_RDONLY, /* The DF is read only */
DFE_BADSEEK, /* Attempt to seek past end of element */
+ DFE_INVFILE, /* File is neither hdf, cdf, nor netcdf */
/* Low-level HDF I/O errors */
DFE_PUTELEM, /* Hputelement failed in some way */
@@ -207,6 +200,7 @@ typedef enum
DFE_ARGS, /* bad arguments to routine */
DFE_INTERNAL, /* serious internal error */
DFE_NORESET, /* Too late to modify this value */
+ DFE_EXCEEDMAX, /* Value exceeds max allowed */
DFE_GENAPP, /* Generic application-level error */
/* Generic interface errors */
@@ -224,6 +218,7 @@ typedef enum
DFE_RANGE, /* improper range for attempted access */
DFE_BADCONV, /* Don't know how to convert data type */
DFE_BADTYPE, /* Incompatible types specified */
+ DFE_BADDIMNAME, /* Dimension name not valid or already taken */
DFE_NOVGREP, /* No Vgroup representation for SDS and dim */
/* Compression errors */
@@ -284,6 +279,7 @@ typedef enum
DFE_VSREAD, /* Error reading from VData */
DFE_BADVH, /* Error in VData Header */
DFE_FIELDSSET, /* Fields already set for vdata */
+
/* High-level Vdata/Vset errors */
DFE_VSCANTCREATE, /* Cannot create VData */
DFE_VGCANTCREATE, /* Cannot create VGroup */
@@ -292,6 +288,9 @@ typedef enum
DFE_CANTATTACH, /* Cannot attach to a VData/Vset */
DFE_CANTDETACH, /* Cannot detach a VData/Vset with access 'w' */
+/* XDR level errors */
+ DFE_XDRERROR, /* Error occurred in XDR and/or CDF level */
+
/* bit I/O errors */
DFE_BITREAD, /* There was a bit-read error */
DFE_BITWRITE, /* There was a bit-write error */
@@ -304,7 +303,14 @@ typedef enum
DFE_BVNEW, /* Failed to create a bit-vector */
DFE_BVSET, /* Failed when setting a bit in a bit-vector */
DFE_BVGET, /* Failed when getting a bit in a bit-vector */
- DFE_BVFIND /* Failed when finding a bit in a bit-vector */
+ DFE_BVFIND, /* Failed when finding a bit in a bit-vector */
+
+/* General to all interfaces */
+ DFE_CANTSETATTR, /* Failed to add an attribute */
+ DFE_CANTGETATTR, /* Failed to find or get an attribute */
+
+/* Annotation interface errors */
+ DFE_ANAPIERROR /* Failed in annotation interface */
}
hdf_err_code_t;
@@ -339,6 +345,7 @@ PRIVATE const struct error_messages_t error_messages[] =
{DFE_SEEKERROR, "Error performing seek operation"},
{DFE_RDONLY, "Attempt to write to read-only HDF file"},
{DFE_BADSEEK, "Attempt to seek past end of element"},
+ {DFE_INVFILE, "File is not supported, must be either hdf, cdf, netcdf"},
/* Low-level HDF I/O errors */
{DFE_PUTELEM, "Hputelement failed in some way"},
@@ -393,6 +400,7 @@ PRIVATE const struct error_messages_t error_messages[] =
{DFE_ARGS, "Invalid arguments to routine"},
{DFE_INTERNAL, "HDF Internal error"},
{DFE_NORESET, "Can not reset this value"},
+ {DFE_EXCEEDMAX, "Value exceeds max allowed"},
{DFE_GENAPP, "Generic application-level error"},
/* Generic interface errors */
@@ -410,6 +418,8 @@ PRIVATE const struct error_messages_t error_messages[] =
{DFE_RANGE, "Improper range for attempted access"},
{DFE_BADCONV, "Don't know how to convert data type"},
{DFE_BADTYPE, "Incompatible type specified"},
+ {DFE_BADDIMNAME, "Dimension name not valid or already taken"},
+ {DFE_NOVGREP, "No Vgroup representation for SDS and dim"},
/* Compression errors */
{DFE_BADSCHEME, "Unknown compression scheme specified"},
@@ -427,13 +437,17 @@ PRIVATE const struct error_messages_t error_messages[] =
{DFE_CANTCOMP, "Can't compress an object"},
{DFE_CANTDECOMP, "Can't de-compress an object"},
{DFE_NOENCODER, "Encoder not available"},
+ {DFE_NOSZLIB, "SZIP library not available"},
+ {DFE_COMPVERSION, "Z_VERSION_ERROR (-6) returned from zlib"},
+ {DFE_READCOMP, "Error in reading compressed data"},
/* Raster errors */
- {DFE_NODIM, "No dimension record associated with image"},
+ {DFE_NODIM, "No dimension record associated with image or data set"},
{DFE_BADRIG, "Error processing a RIG"},
{DFE_RINOTFOUND, "Can't find raster image"},
{DFE_BADATTR, "Bad Attribute"},
{DFE_LUTNOTFOUND, "No palette information for RIG"},
+ {DFE_GRNOTFOUND, "Can't find specified GR"},
/* SDG/NDG errors */
{DFE_BADTABLE, "The nsdg table is wrong"},
@@ -456,6 +470,7 @@ PRIVATE const struct error_messages_t error_messages[] =
{DFE_BADVSCLASS, "Cannot set VData class"},
{DFE_VSWRITE, "Error writing to VData"},
{DFE_VSREAD, "Error reading from VData"},
+ {DFE_BADVH, "Error in VData Header"},
{DFE_FIELDSSET, "Fields already set for vdata"},
/* High-level Vdata/Vset errors */
@@ -466,6 +481,9 @@ PRIVATE const struct error_messages_t error_messages[] =
{DFE_CANTATTACH, "Cannot attach to a VData"},
{DFE_CANTDETACH, "Cannot detach a VData with access 'w'"},
+/* XDR level errors */
+ {DFE_XDRERROR, "Error from XDR and/or CDF level"},
+
/* bit I/O errors */
{DFE_BITREAD, "There was a bit-read error"},
{DFE_BITWRITE, "There was a bit-write error"},
@@ -478,7 +496,15 @@ PRIVATE const struct error_messages_t error_messages[] =
{DFE_BVNEW, "Failed to create a bit-vector"},
{DFE_BVSET, "Failed when setting a bit in a bit-vector"},
{DFE_BVGET, "Failed when getting a bit in a bit-vector"},
- {DFE_BVFIND, "Failed when finding a bit in a bit-vector"}
+ {DFE_BVFIND, "Failed when finding a bit in a bit-vector"},
+
+/* General to all interfaces */
+ {DFE_CANTSETATTR, "Cannot set an attribute"},
+ {DFE_CANTGETATTR, "Cannot find or get an attribute"},
+
+/* Annotation interface errors */
+ {DFE_ANAPIERROR, "Failed in annotation interface"}
+
};
#endif /* _H_ERR_MASTER_ */
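
The new error codes above (DFE_INVFILE, DFE_EXCEEDMAX, DFE_XDRERROR, DFE_CANTSETATTR,
DFE_CANTGETATTR, DFE_ANAPIERROR, ...) resolve to their message strings through the
existing error API; a minimal sketch using HEvalue()/HEstring():

    #include <stdio.h>
    #include "hdf.h"

    /* Print the most recent error on the HDF error stack as readable text. */
    void print_last_error(void)
    {
        int16 err = HEvalue(1);   /* 1 == most recently pushed error record */

        if (err != DFE_NONE)
            fprintf(stderr, "HDF error: %s\n", HEstring((hdf_err_code_t)err));
    }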
diff --git a/hdf/src/herrf.c b/hdf/src/herrf.c
index 6151676..a84d7ae 100644
--- a/hdf/src/herrf.c
+++ b/hdf/src/herrf.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: herrf.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: herrf.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*-----------------------------------------------------------------------------
* File: herrf.c
diff --git a/hdf/src/hextelt.c b/hdf/src/hextelt.c
index cc05323..37b38e9 100644
--- a/hdf/src/hextelt.c
+++ b/hdf/src/hextelt.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6043 $";
-#endif
-
-/* $Id: hextelt.c 6043 2014-01-21 21:09:03Z acheng $ */
+/* $Id: hextelt.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*LINTLIBRARY */
/* ------------------------------ hextelt.c -------------------------------
@@ -65,7 +61,7 @@ static char RcsId[] = "@(#)$Revision: 6043 $";
HXPcloseAID -- close file but keep AID active
HXPendaccess -- close file, free AID
HXPinfo -- return info about an external element
- HXPinquire -- retreive information about an external element
+ HXPinquire -- retrieve information about an external element
HXPread -- read some data out of an external file
HXPreset -- replace the current external info with new info
HXPseek -- set the seek position
@@ -180,6 +176,12 @@ DESCRIPTION
successful execution. FAIL is returned if any error is encountered.
FORTRAN
None
+MODIFICATION
+ Previously, the data_len used was incorrect when the element is
+ already special, either linked-block or external data, in which
+ case, the data_len was the length of the special info, not the data.
+ Changed to use correct data_len when the element is already special.
+ (HDFFR-1516) -BMR, Sep 5, 2015
--------------------------------------------------------------------------*/
int32
@@ -206,57 +208,72 @@ HXcreate(int32 file_id, uint16 tag, uint16 ref, const char *extern_file_name, in
|| (special_tag = MKSPECIALTAG(tag)) == DFTAG_NULL)
HGOTO_ERROR(DFE_ARGS, FAIL);
+ /* Make sure file has write access */
if (!(file_rec->access & DFACC_WRITE))
HGOTO_ERROR(DFE_DENIED, FAIL);
- /* get a access records */
- access_rec = HIget_access_rec();
- if (access_rec == NULL)
- HGOTO_ERROR(DFE_TOOMANY, FAIL);
-
- /* search for identical dd */
+ /* Get access to the DD of this tag/ref */
if ((data_id=HTPselect(file_rec,tag,ref))!=FAIL)
{
- /* Check if the element is already special */
+ /* If element is already special, proceed according to special type */
if (HTPis_special(data_id)==TRUE)
{
sp_info_block_t sp_info;
int32 aid, retcode;
- aid = Hstartread(file_id, tag, ref);
+ /* Get read access on the tag/ref */
+ if ((aid = Hstartread(file_id, tag, ref)) == FAIL)
+ HGOTO_ERROR(DFE_NOMATCH, FAIL);
+
+ /* Get the special info structure */
retcode = HDget_special_info(aid, &sp_info);
- Hendaccess(aid);
+
if ((retcode == FAIL) || (sp_info.key == FAIL))
HGOTO_ERROR(DFE_CANTMOD, FAIL);
+ /* We can proceed with linked-block and external, but
+ not compression special element */
switch(sp_info.key)
- {
- /* we can proceed with these types of special elements */
+ {
case SPECIAL_LINKED:
+ if (HDinqblockinfo(aid, &data_len, NULL, NULL, NULL) == FAIL)
+ {
+ Hendaccess(aid);
+ HRETURN_ERROR(DFE_INTERNAL, FAIL);
+ }
+ break;
case SPECIAL_EXT:
+ data_len = sp_info.length;
break;
-
- /* abort since we cannot convert the data element to an external data element */
case SPECIAL_COMP:
default:
HTPendaccess(data_id);
+ Hendaccess(aid);
HGOTO_ERROR(DFE_CANTMOD, FAIL);
- } /* switch */
- } /* end if */
-
- /* get the info for the dataset */
- if(HTPinquire(data_id,NULL,NULL,NULL,&data_len)==FAIL)
- {
- HTPendaccess(data_id);
- HGOTO_ERROR(DFE_INTERNAL, FAIL);
- } /* end if */
+ } /* switch */
+ /* Close access on this special element */
+ Hendaccess(aid);
+ } /* end if data_id is special */
+
+ else
+ { /* not special */
+ /* Then use HTPinquire to get the length of the data. Note: when
+ this tag is special, this length is the length of the special
+ info only, not data. */
+ if (HTPinquire(data_id,NULL,NULL,NULL,&data_len)==FAIL)
+ {
+ HTPendaccess(data_id);
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
+ } /* end if */
+ }
} /* end if */
/* build the customized external file name. */
if (!(fname = HXIbuildfilename(extern_file_name, DFACC_CREATE)))
HGOTO_ERROR(DFE_BADOPEN, FAIL);
- /* create the external file */
+ /* Try to open the external file with write access first, if that fails,
+ create it */
file_external = (hdf_file_t)HI_OPEN(fname, DFACC_WRITE);
if (OPENERR(file_external))
{
@@ -266,12 +283,18 @@ HXcreate(int32 file_id, uint16 tag, uint16 ref, const char *extern_file_name, in
}
HDfree(fname);
- /* set up the special element information and write it to file */
+ /* Get a bare access record and special info structure */
+ access_rec = HIget_access_rec();
+ if (access_rec == NULL)
+ HGOTO_ERROR(DFE_TOOMANY, FAIL);
+
access_rec->special_info = HDmalloc((uint32) sizeof(extinfo_t));
info = (extinfo_t *) access_rec->special_info;
if (!info)
HGOTO_ERROR(DFE_NOSPACE, FAIL);
+ /* If there is data, either regular or special, read the data then write
+ it to the external file, otherwise, do nothing */
if (data_id!=FAIL && data_len>0)
{
if ((buf = HDmalloc((uint32) data_len)) == NULL)
@@ -287,15 +310,15 @@ HXcreate(int32 file_id, uint16 tag, uint16 ref, const char *extern_file_name, in
else
info->length = start_len;
+ /* Set up the special element information and write it to file */
info->attached = 1;
info->file_open = TRUE;
info->file_external = file_external;
info->extern_offset = offset;
info->extern_file_name = (char *) HDstrdup(extern_file_name);
if (!info->extern_file_name)
- HGOTO_ERROR(DFE_NOSPACE, FAIL);
+ HGOTO_ERROR(DFE_NOSPACE, FAIL);
- /* Getting ready to write out special info struct */
info->length_file_name = (int32)HDstrlen(extern_file_name);
{
uint8 *p = local_ptbuf;
@@ -306,19 +329,25 @@ HXcreate(int32 file_id, uint16 tag, uint16 ref, const char *extern_file_name, in
INT32ENCODE(p, info->length_file_name);
HDstrcpy((char *) p, extern_file_name);
}
+
+ /* Free up the current DD */
if(data_id!=FAIL)
if (HTPdelete(data_id) == FAIL)
HGOTO_ERROR(DFE_CANTDELDD, FAIL);
- /* write the special info structure to file */
+ /* Write the special info structure to file */
if((dd_aid=Hstartaccess(file_id,special_tag,ref,DFACC_ALL))==FAIL)
HGOTO_ERROR(DFE_CANTACCESS, FAIL);
if (Hwrite(dd_aid, 14+info->length_file_name, local_ptbuf) == FAIL)
+ {
+ Hendaccess(dd_aid);
HGOTO_ERROR(DFE_WRITEERROR, FAIL);
+ }
if(Hendaccess(dd_aid)==FAIL)
HGOTO_ERROR(DFE_CANTENDACCESS, FAIL);
+ dd_aid = FAIL;
- /* update access record and file record */
+ /* Update access record and file record */
if((access_rec->ddid=HTPselect(file_rec,special_tag,ref))==FAIL)
HGOTO_ERROR(DFE_INTERNAL, FAIL);
access_rec->special_func = &ext_funcs;
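
The reworked HXcreate() above now picks up the correct data length when the element
is already a linked-block or external special element (HDFFR-1516). A minimal usage
sketch; the tag, ref, and external file name are illustrative:

    #include "hdf.h"

    /* Move an element's data into an external file at offset 0. */
    intn externalize_element(int32 file_id, uint16 tag, uint16 ref)
    {
        int32 aid = HXcreate(file_id, tag, ref, "element_data.ext", 0, 0);

        if (aid == FAIL)
            return FAIL;
        return Hendaccess(aid);
    }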
diff --git a/hdf/src/hfile.c b/hdf/src/hfile.c
index d992a4c..688895a 100644
--- a/hdf/src/hfile.c
+++ b/hdf/src/hfile.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6043 $";
-#endif
-
-/* $Id: hfile.c 6043 2014-01-21 21:09:03Z acheng $ */
+/* $Id: hfile.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*LINTLIBRARY */
/*+
diff --git a/hdf/src/hfile.h b/hdf/src/hfile.h
index fb9146e..476ca40 100644
--- a/hdf/src/hfile.h
+++ b/hdf/src/hfile.h
@@ -11,7 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-/* $Id: hfile.h 6194 2015-02-05 14:17:12Z bmribler $ */
+/* $Id: hfile.h 6473 2016-06-24 21:38:14Z bmribler $ */
/*+ hfile.h
*** Header for hfile.c, routines for low level data element I/O
@@ -51,10 +51,10 @@
#define LIBVER_MAJOR 4
#define LIBVER_MINOR 2
-#define LIBVER_RELEASE 11
+#define LIBVER_RELEASE 12
#define LIBVER_SUBRELEASE "" /* For pre-releases like snap0 */
/* Empty string for real releases. */
-#define LIBVER_STRING "HDF Version 4.2 Release 11, February 5, 2015"
+#define LIBVER_STRING "HDF Version 4.2 Release 12, June 24, 2016"
#define LIBVSTR_LEN 80 /* length of version string */
#define LIBVER_LEN 92 /* 4+4+4+80 = 92 */
/* end of version tags */
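
The release bump above can be confirmed at run time with Hgetlibversion(); a small
sketch (the 80-character buffer mirrors LIBVSTR_LEN in hfile.h):

    #include <stdio.h>
    #include "hdf.h"

    void show_library_version(void)
    {
        uint32 majorv = 0, minorv = 0, release = 0;
        char   verstr[81];   /* LIBVSTR_LEN (80) plus the terminating NUL */

        if (Hgetlibversion(&majorv, &minorv, &release, verstr) == SUCCEED)
            printf("HDF %u.%u release %u: %s\n",
                   (unsigned)majorv, (unsigned)minorv, (unsigned)release, verstr);
    }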
diff --git a/hdf/src/hfiledd.c b/hdf/src/hfiledd.c
index 03bf40b..fbfc014 100644
--- a/hdf/src/hfiledd.c
+++ b/hdf/src/hfiledd.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 5584 $";
-#endif
-
-/* $Id: hfiledd.c 5584 2011-04-13 18:25:06Z bmribler $ */
+/* $Id: hfiledd.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*
FILE
diff --git a/hdf/src/hfilef.c b/hdf/src/hfilef.c
index c4c7b24..9a58d19 100644
--- a/hdf/src/hfilef.c
+++ b/hdf/src/hfilef.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: hfilef.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: hfilef.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*-----------------------------------------------------------------------------
* File: hfilef.c
diff --git a/hdf/src/hkit.c b/hdf/src/hkit.c
index 9bea081..e62ad67 100644
--- a/hdf/src/hkit.c
+++ b/hdf/src/hkit.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6043 $";
-#endif
-
-/* $Id: hkit.c 6043 2014-01-21 21:09:03Z acheng $ */
+/* $Id: hkit.c 6357 2016-05-13 05:00:06Z bmribler $ */
#include <ctype.h>
#include "hkit.h"
diff --git a/hdf/src/hproto.h b/hdf/src/hproto.h
index 54c7424..4e96271 100644
--- a/hdf/src/hproto.h
+++ b/hdf/src/hproto.h
@@ -11,7 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-/* $Id: hproto.h 6043 2014-01-21 21:09:03Z acheng $ */
+/* $Id: hproto.h 6427 2016-06-13 21:56:09Z byrn $ */
#ifndef _H_PROTO
#define _H_PROTO
@@ -401,12 +401,9 @@ HDFLIBAPI intn Hdeldd(int32 file_id, /* IN: File ID the tag/refs are in */
#endif /* defined MALLOC_CHECK */
-#if defined IBM6000 || defined SUN
HDFPUBLIC extern char *HDstrdup
(const char *s);
-#endif
-
HDFLIBAPI intn HDc2fstr
(char * str, intn len);
diff --git a/hdf/src/linklist.c b/hdf/src/linklist.c
index 7249309..e4163b8 100644
--- a/hdf/src/linklist.c
+++ b/hdf/src/linklist.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: linklist.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: linklist.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*
FILE
diff --git a/hdf/src/maldebug.c b/hdf/src/maldebug.c
index 4d6673f..24bafe3 100644
--- a/hdf/src/maldebug.c
+++ b/hdf/src/maldebug.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: maldebug.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: maldebug.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*----------------------------------------------------------------------
*
diff --git a/hdf/src/mcache.c b/hdf/src/mcache.c
index 4fa04e6..7212a94 100644
--- a/hdf/src/mcache.c
+++ b/hdf/src/mcache.c
@@ -39,11 +39,7 @@
* AUTHOR - George V.- 1996/08/22
*****************************************************************************/
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 5146 $";
-#endif
-
-/* $Id: mcache.c 5146 2009-01-14 17:46:57Z fbaker $ */
+/* $Id: mcache.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*
* NOTE:
diff --git a/hdf/src/mfan.c b/hdf/src/mfan.c
index b8c11e6..e424d72 100644
--- a/hdf/src/mfan.c
+++ b/hdf/src/mfan.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 5454 $";
-#endif
-
-/* $Id: mfan.c 5454 2010-08-27 17:24:55Z bmribler $ */
+/* $Id: mfan.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*-----------------------------------------------------------------------------
* File: mfan.c
diff --git a/hdf/src/mfanf.c b/hdf/src/mfanf.c
index 2682512..097fbee 100644
--- a/hdf/src/mfanf.c
+++ b/hdf/src/mfanf.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: mfanf.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: mfanf.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*-----------------------------------------------------------------------------
* File: mfanf.c
diff --git a/hdf/src/mfgr.c b/hdf/src/mfgr.c
index 4b9b052..5f506ca 100644
--- a/hdf/src/mfgr.c
+++ b/hdf/src/mfgr.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "$Revision: 6135 $";
-#endif
-
-/* $Id: mfgr.c 6135 2014-09-12 00:13:35Z acheng $ */
+/* $Id: mfgr.c 6369 2016-05-24 05:25:11Z bmribler $ */
/*
FILE
@@ -189,6 +185,13 @@ static TBBT_TREE *gr_tree=NULL;
/* Whether we've installed the library termination function yet for this interface */
PRIVATE intn library_terminate = FALSE;
+typedef struct image_info_struct {
+ uint16 grp_tag,grp_ref; /* tag/ref of the group the image is in */
+ uint16 img_tag,img_ref; /* tag/ref of the image itself */
+ uint16 aux_ref; /* ref of aux. info about an image */
+ int32 offset; /* offset of the image data */
+} imginfo_t;
+
PRIVATE intn GRIupdatemeta(int32 hdf_file_id,ri_info_t *img_ptr);
PRIVATE intn GRIupdateRIG(int32 hdf_file_id,ri_info_t *img_ptr);
@@ -437,6 +440,107 @@ New_grfile(HFILEID f)
return(g);
} /* end New_grfile() */
+/* -------------------------- Store_imginfo ------------------------ */
+/*
+ Stores information into the image_info_struct.
+
+ Added to refactor repeated code. -BMR, Jun 7, 2015
+ */
+PRIVATE void
+Store_imginfo(
+ imginfo_t *imginfo,
+ uint16 grp_tag,
+ uint16 grp_ref,
+ uint16 img_tag,
+ uint16 img_ref)
+{
+ imginfo->grp_tag=(uint16)grp_tag;
+ imginfo->grp_ref=(uint16)grp_ref;
+ imginfo->img_tag=(uint16)img_tag;
+ imginfo->img_ref=(uint16)img_ref;
+} /* end Store_imginfo() */
+
+/* -------------------------- Get_oldimgs ------------------------ */
+/*
+ Find all images of tag searched_tag in the file and store their
+ information in the list of image_info_structs. The stored information
+ includes the image's tag/ref, DFTAG_NULL/DFREF_WILDCARD for the group's
+ tag/ref, and the image's offset in the file.
+
+ Returns the number of images found (0 if none found).
+
+ Added to refactor repeated code. -BMR, Jul 13, 2015
+ */
+PRIVATE intn
+Get_oldimgs(int32 file_id, imginfo_t *img_info, uint16 searched_tag)
+{
+ uint16 find_tag, find_ref;
+ int32 find_off, find_len;
+ intn num_imgs = 0;
+ imginfo_t *ptr = img_info;
+
+ find_tag = find_ref = 0;
+ find_off = find_len = 0;
+
+ while (Hfind(file_id, searched_tag, DFREF_WILDCARD, &find_tag, &find_ref, &find_off, &find_len, DF_FORWARD) == SUCCEED)
+ {
+ /* DFTAG_NULL is passed in for parent group because old images don't
+ have group structure. */
+ Store_imginfo(ptr, DFTAG_NULL, DFREF_WILDCARD, find_tag, find_ref);
+ ptr->offset = find_off; /* store offset */
+ num_imgs++;
+ ptr++;
+ } /* end while */
+ return num_imgs;
+} /* end Get_oldimgs() */
+
+/* -------------------------- Init_diminfo ------------------------ */
+/*
+ Initializes the dimension information.
+
+ Added to refactor repeated code. -BMR, Apr 23, 2015
+ */
+PRIVATE void
+Init_diminfo(dim_info_t *dim_info)
+{ /* Init_diminfo */
+ dim_info->dim_ref = DFREF_WILDCARD;
+ dim_info->xdim = 256;
+ dim_info->ydim = 1;
+ dim_info->ncomps = 3;
+ dim_info->nt = DFNT_UINT8;
+ dim_info->file_nt_subclass = DFNTF_HDFDEFAULT;
+ dim_info->il = MFGR_INTERLACE_PIXEL;
+ dim_info->nt_tag = DFTAG_NULL;
+ dim_info->nt_ref = DFREF_WILDCARD;
+ dim_info->comp_tag = DFTAG_NULL;
+ dim_info->comp_ref = DFREF_WILDCARD;
+} /* end Init_diminfo */
+
+/* -------------------------- Decode_diminfo ------------------------ */
+/*
+ Decodes dimension information.
+
+ The parameter *p points to a buffer containing the data read from the
+ file. The data is the previously encoded dimension information.
+
+ Added to refactor repeated code. -BMR, Apr 23, 2015
+ */
+PRIVATE void
+Decode_diminfo(uint8 *p, dim_info_t *dim_info)
+{
+ int16 int16var; /* temp var */
+
+ INT32DECODE(p, dim_info->xdim);
+ INT32DECODE(p, dim_info->ydim);
+ UINT16DECODE(p, dim_info->nt_tag);
+ UINT16DECODE(p, dim_info->nt_ref);
+ INT16DECODE(p, int16var);
+ dim_info->ncomps = (int32)int16var;
+ INT16DECODE(p, dim_info->il);
+ UINT16DECODE(p, dim_info->comp_tag);
+ UINT16DECODE(p, dim_info->comp_ref);
+} /* Decode_diminfo */
+
/*--------------------------------------------------------------------------
NAME
GRIget_image_list
@@ -470,16 +574,11 @@ static intn GRIget_image_list(int32 file_id,gr_info_t *gr_ptr)
uint16 gr_ref; /* ref # of the Vgroup containing new-style RIs */
intn curr_image; /* current image gathering information about */
intn nimages; /* total number of potential images */
+ intn noldimages; /* count of old imgs returned by Get_oldimgs */
int32 nri, nci, nri8, nci8, nii8, nvg; /* number of RIs, CIs, RI8s, CI8s & II8s & Vgroups */
- struct image_info {
- uint16 grp_tag,grp_ref; /* tag/ref of the group the image is in */
- uint16 img_tag,img_ref; /* tag/ref of the image itself */
- uint16 aux_ref; /* ref of aux. info about an image */
- int32 offset; /* offset of the image data */
- uint16 orig_tag; /* original tag before the elimination of duplicates */
- } *img_info;
uint16 find_tag, find_ref; /* storage for tag/ref pairs found */
int32 find_off, find_len; /* storage for offset/lengths of tag/refs found */
+ imginfo_t *img_info; /* image info list */
intn i, j; /* local counting variable */
intn ret_value = SUCCEED;
@@ -516,127 +615,133 @@ static intn GRIget_image_list(int32 file_id,gr_info_t *gr_ptr)
}
/* Get space to store the image offsets */
- if ((img_info = (struct image_info *) HDmalloc(nimages * sizeof(struct image_info))) == NULL)
+ if ((img_info = (imginfo_t *) HDmalloc(nimages * sizeof(imginfo_t))) == NULL)
HGOTO_ERROR(DFE_NOSPACE, FAIL);
- HDmemset(img_info,0,(size_t)nimages*sizeof(struct image_info));
+ HDmemset(img_info, 0, (size_t)nimages * sizeof(imginfo_t));
/* search through the GR group for raster images & global attributes */
curr_image = 0;
if((gr_ref=(uint16)Vfind(file_id,GR_NAME))!=0)
- {
- int32 gr_key; /* Vgroup key of the GR Vgroup */
+ {
+ int32 gr_key; /* Vgroup key of the GR Vgroup */
- gr_ptr->gr_ref=gr_ref; /* squirrel this away for later use */
- if((gr_key=Vattach(file_id,(int32)gr_ref,"r"))!=FAIL)
- {
- int32 nobjs=Vntagrefs(gr_key); /* The number of objects in the Vgroup */
- int32 img_key; /* Vgroup key of an image */
- int32 grp_tag,grp_ref; /* a tag/ref in the Vgroup */
- int32 img_tag,img_ref; /* image tag/ref in the Vgroup */
- char textbuf[VGNAMELENMAX + 1]; /* buffer to store the name in */
+ gr_ptr->gr_ref=gr_ref; /* squirrel this away for later use */
+ if((gr_key=Vattach(file_id,(int32)gr_ref,"r"))!=FAIL)
+ {
+ int32 nobjs=Vntagrefs(gr_key); /* The number of objects in the Vgroup */
+ int32 img_key; /* Vgroup key of an image */
+ int32 grp_tag,grp_ref; /* a tag/ref in the Vgroup */
+ int32 img_tag,img_ref; /* image tag/ref in the Vgroup */
+ char textbuf[VGNAMELENMAX + 1]; /* buffer to store the name in */
- for(i=0; i<nobjs; i++)
- {
- if(Vgettagref(gr_key,i,&grp_tag,&grp_ref)==FAIL)
- continue;
+ for(i=0; i<nobjs; i++)
+ {
+ if(Vgettagref(gr_key,i,&grp_tag,&grp_ref)==FAIL)
+ continue;
- switch(grp_tag)
+ switch(grp_tag)
+ {
+ case DFTAG_VG: /* should be an image */
+ if((img_key=Vattach(file_id,grp_ref,"r"))!=FAIL)
{
- case DFTAG_VG: /* should be an image */
- if((img_key=Vattach(file_id,grp_ref,"r"))!=FAIL)
- {
- if(Vgetclass(img_key,textbuf)!=FAIL)
- {
- if(!HDstrcmp(textbuf,RI_NAME))
- { /* found an image, whew! */
- for(j=0; j<Vntagrefs(img_key); j++)
- {
- if(Vgettagref(img_key,j,&img_tag,&img_ref)==FAIL)
- continue;
- if(img_tag==DFTAG_RI || img_tag==DFTAG_CI)
- {
- img_info[curr_image].grp_tag=(uint16)grp_tag;
- img_info[curr_image].grp_ref=(uint16)grp_ref;
- img_info[curr_image].img_tag=(uint16)img_tag;
- img_info[curr_image].img_ref=(uint16)img_ref;
- img_info[curr_image].offset = Hoffset(file_id, (uint16)img_tag, (uint16)img_ref); /* store offset */
- curr_image++;
- break;
- } /* end if */
- } /* end for */
- } /* end if */
- } /* end if */
- Vdetach(img_key);
- } /* end if */
+ if(Vgetclass(img_key,textbuf)!=FAIL)
+ {
+ if(!HDstrcmp(textbuf,RI_NAME))
+ { /* it is an image, get the image's tag/ref */
+ for(j=0; j<Vntagrefs(img_key); j++)
+ {
+ if(Vgettagref(img_key,j,&img_tag,&img_ref)==FAIL)
+ continue;
+ /* Make sure the tag is correct, then
+ store the image's info and the
+ tag/ref of the vgroup that represents
+ the image into image_info_struct and
+ increment image count */
+ if(img_tag==DFTAG_RI || img_tag==DFTAG_CI)
+ {
+ Store_imginfo(&img_info[curr_image], grp_tag, grp_ref, img_tag, img_ref);
+ img_info[curr_image].offset = Hoffset(file_id, (uint16)img_tag, (uint16)img_ref); /* store offset */
+ curr_image++;
+ break;
+ } /* end if */
+ } /* end for */
+ } /* end if */
+ } /* end if */
+ Vdetach(img_key);
+ } /* end if */
+ break; /* case DFTAG_VG, an image */
+
+ case DFTAG_VH: /* must be a "global" attribute */
+ {
+ at_info_t *new_attr; /* attr to add to the set of global attrs */
+ int32 at_key; /* VData key for the attribute */
+
+ if((new_attr=(at_info_t *)HDmalloc(sizeof(at_info_t)))==NULL)
+ HGOTO_ERROR(DFE_NOSPACE,FAIL);
+ new_attr->ref=(uint16)grp_ref;
+ new_attr->index=gr_ptr->gattr_count;
+ new_attr->data_modified=FALSE;
+ new_attr->new_at=FALSE;
+ new_attr->data=NULL;
+
+ /* Open the vdata to read the attr's info */
+ if((at_key=VSattach(file_id,(int32)grp_ref,"r"))!=FAIL)
+ {
+ char *fname;
+
+ /* Make certain the attribute only has one field */
+ if(VFnfields(at_key)!=1)
+ {
+ VSdetach(at_key);
+ HDfree(new_attr);
break;
+ } /* end if */
+ new_attr->nt=VFfieldtype(at_key,0);
+ new_attr->len=VFfieldorder(at_key,0);
+ if(new_attr->len==1)
+ new_attr->len=VSelts(at_key);
- case DFTAG_VH: /* must be a "global" attaribute */
- {
- at_info_t *new_attr; /* attribute to add to the set of local attributes */
- int32 at_key; /* VData key for the attribute */
-
- if((new_attr=(at_info_t *)HDmalloc(sizeof(at_info_t)))==NULL)
- HGOTO_ERROR(DFE_NOSPACE,FAIL);
- new_attr->ref=(uint16)grp_ref;
- new_attr->index=gr_ptr->gattr_count;
- new_attr->data_modified=FALSE;
- new_attr->new_at=FALSE;
- new_attr->data=NULL;
- if((at_key=VSattach(file_id,(int32)grp_ref,"r"))!=FAIL)
- {
- char *fname;
-
- /* Make certain the attribute only has one field */
- if(VFnfields(at_key)!=1)
- {
- VSdetach(at_key);
- HDfree(new_attr);
- break;
- } /* end if */
- new_attr->nt=VFfieldtype(at_key,0);
- new_attr->len=VFfieldorder(at_key,0);
- if(new_attr->len==1)
- new_attr->len=VSelts(at_key);
-
- /* Get the name of the attribute */
- if((fname=VFfieldname(at_key,0))==NULL)
- {
- sprintf(textbuf,"Attribute #%d",(int)new_attr->index);
- if((new_attr->name=(char *)HDmalloc(HDstrlen(textbuf)+1))==NULL)
- {
- VSdetach(at_key);
- HDfree(new_attr);
- HGOTO_ERROR(DFE_NOSPACE,FAIL);
- } /* end if */
- HDstrcpy(new_attr->name,textbuf);
- } /* end if */
- else
- {
- if((new_attr->name=(char *)HDmalloc(HDstrlen(fname)+1))==NULL)
- {
- VSdetach(at_key);
- HDfree(new_attr);
- HGOTO_ERROR(DFE_NOSPACE,FAIL);
- } /* end if */
- HDstrcpy(new_attr->name,fname);
- } /* end else */
+ /* Get the name of the attribute */
+ if((fname=VFfieldname(at_key,0))==NULL)
+ {
+ sprintf(textbuf,"Attribute #%d",(int)new_attr->index);
+ if((new_attr->name=(char *)HDmalloc(HDstrlen(textbuf)+1))==NULL)
+ {
+ VSdetach(at_key);
+ HDfree(new_attr);
+ HGOTO_ERROR(DFE_NOSPACE,FAIL);
+ } /* end if */
+ HDstrcpy(new_attr->name,textbuf);
+ } /* end if */
+ else
+ {
+ if((new_attr->name=(char *)HDmalloc(HDstrlen(fname)+1))==NULL)
+ {
+ VSdetach(at_key);
+ HDfree(new_attr);
+ HGOTO_ERROR(DFE_NOSPACE,FAIL);
+ } /* end if */
+ HDstrcpy(new_attr->name,fname);
+ } /* end else */
- tbbtdins(gr_ptr->gattree, new_attr, NULL); /* insert the attr instance in B-tree */
+ /* insert the attr instance in B-tree */
+ tbbtdins(gr_ptr->gattree, new_attr, NULL);
- VSdetach(at_key);
- } /* end if */
+ VSdetach(at_key);
+ } /* end if */
- gr_ptr->gattr_count++;
- } /* end case */
- break;
+ /* increment the number of GR global attributes */
+ gr_ptr->gattr_count++;
+ } /* end case DFTAG_VH, a global attribute */
+ break;
- default:
- break;
- } /* end switch */
- } /* end for */
- Vdetach(gr_key);
- } /* end if */
- } /* end if */
+ default:
+ break;
+ } /* end switch */
+ } /* end for */
+ Vdetach(gr_key);
+ } /* end if */
+ } /* end if */
/* Get information about the RIGs in the file */
find_tag = find_ref = 0;
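
This patch folds the repeated per-image field assignments into a Store_imginfo helper (used above and again in the next hunk). The helper's definition is not part of this hunk; a minimal sketch of what it presumably does, reconstructed from the assignments it replaces (the img_info_t type name is an assumption):

    /* Sketch only -- the real Store_imginfo is defined elsewhere in mfgr.c
     * and the img_info_t type name is assumed.  It records one discovered
     * image: the tag/ref of the group holding it and of the image data. */
    static void
    Store_imginfo(img_info_t *info, int32 grp_tag, int32 grp_ref,
                  int32 img_tag, int32 img_ref)
    {
        info->grp_tag = (uint16)grp_tag;   /* DFTAG_VG or DFTAG_RIG */
        info->grp_ref = (uint16)grp_ref;
        info->img_tag = (uint16)img_tag;   /* DFTAG_RI or DFTAG_CI */
        info->img_ref = (uint16)img_ref;
    }
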
@@ -655,10 +760,7 @@ static intn GRIget_image_list(int32 file_id,gr_info_t *gr_ptr)
{
if (elt_tag != DFTAG_NULL && elt_ref != DFREF_NONE) /* make certain we found an image */
{ /* store the information about the image */
- img_info[curr_image].grp_tag=DFTAG_RIG;
- img_info[curr_image].grp_ref=find_ref;
- img_info[curr_image].img_tag=elt_tag;
- img_info[curr_image].img_ref=elt_ref;
+ Store_imginfo(&img_info[curr_image], DFTAG_RIG, find_ref, elt_tag, elt_ref);
img_info[curr_image].offset = Hoffset(file_id, elt_tag, elt_ref); /* store offset */
curr_image++;
} /* end if */
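
The hunk below replaces three nearly identical Hfind loops (over DFTAG_RI8, DFTAG_CI8 and DFTAG_II8) with calls to a Get_oldimgs helper whose definition is not shown here. A plausible sketch, reconstructed from the loops it replaces (img_info_t again assumed as the element type):

    /* Sketch only -- the real Get_oldimgs lives elsewhere in mfgr.c.  It
     * scans the file for every element with the given old-style raster tag,
     * records each one as an ungrouped image, and returns how many it found. */
    static intn
    Get_oldimgs(int32 file_id, img_info_t *img_info, uint16 search_tag)
    {
        uint16 find_tag = 0, find_ref = 0;
        int32  find_off, find_len;
        intn   nimages = 0;

        while (Hfind(file_id, search_tag, DFREF_WILDCARD, &find_tag, &find_ref,
                     &find_off, &find_len, DF_FORWARD) == SUCCEED)
          {
              img_info[nimages].grp_tag = DFTAG_NULL;     /* old-style images have no group */
              img_info[nimages].grp_ref = DFREF_WILDCARD;
              img_info[nimages].img_tag = find_tag;
              img_info[nimages].img_ref = find_ref;
              img_info[nimages].offset  = find_off;       /* store offset */
              nimages++;
          }
        return nimages;
    }

With such a helper, each of the three old while-loops collapses into a single call plus an update of curr_image, as the hunk below shows.
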
@@ -667,40 +769,16 @@ static intn GRIget_image_list(int32 file_id,gr_info_t *gr_ptr)
} /* end while */
/* go through the RI8s */
- find_tag = find_ref = 0;
- while (Hfind(file_id, DFTAG_RI8, DFREF_WILDCARD, &find_tag, &find_ref, &find_off, &find_len, DF_FORWARD) == SUCCEED)
- {
- img_info[curr_image].grp_tag=DFTAG_NULL;
- img_info[curr_image].grp_ref=DFREF_WILDCARD;
- img_info[curr_image].img_tag=find_tag;
- img_info[curr_image].img_ref=find_ref;
- img_info[curr_image].offset = find_off; /* store offset */
- curr_image++;
- } /* end while */
+ noldimages = Get_oldimgs(file_id, &img_info[curr_image], DFTAG_RI8);
+ curr_image = curr_image + noldimages;
/* go through the CI8s */
- find_tag = find_ref = 0;
- while (Hfind(file_id, DFTAG_CI8, DFREF_WILDCARD, &find_tag, &find_ref, &find_off, &find_len, DF_FORWARD) == SUCCEED)
- {
- img_info[curr_image].grp_tag=DFTAG_NULL;
- img_info[curr_image].grp_ref=DFREF_WILDCARD;
- img_info[curr_image].img_tag=find_tag;
- img_info[curr_image].img_ref=find_ref;
- img_info[curr_image].offset = find_off; /* store offset */
- curr_image++;
- } /* end while */
+ noldimages = Get_oldimgs(file_id, &img_info[curr_image], DFTAG_CI8);
+ curr_image = curr_image + noldimages;
/* go through the II8s */
- find_tag = find_ref = 0;
- while (Hfind(file_id, DFTAG_II8, DFREF_WILDCARD, &find_tag, &find_ref, &find_off, &find_len, DF_FORWARD) == SUCCEED)
- {
- img_info[curr_image].grp_tag=DFTAG_NULL;
- img_info[curr_image].grp_ref=DFREF_WILDCARD;
- img_info[curr_image].img_tag=find_tag;
- img_info[curr_image].img_ref=find_ref;
- img_info[curr_image].offset = find_off; /* store offset */
- curr_image++;
- } /* end while */
+ noldimages = Get_oldimgs(file_id, &img_info[curr_image], DFTAG_II8);
+ curr_image = curr_image + noldimages;
/* Eliminate duplicate images by using the offset of the image data */
/* Here's a table for how the images will be eliminated: */
@@ -718,614 +796,532 @@ static intn GRIget_image_list(int32 file_id,gr_info_t *gr_ptr)
+-----------------+-----------------+--------+
*/
- nimages = curr_image; /* reset the number of images we really have */
for (i = 0; i < curr_image; i++)
- { /* go through the images looking for duplicates */
- if(img_info[i].img_tag!=DFTAG_NULL)
- for (j = i+1; j < curr_image; j++)
- {
- /* if their refs are different, they're not duplicate, skip */
- if(img_info[i].img_ref == img_info[j].img_ref)
+ { /* go through the images looking for duplicates */
+ if(img_info[i].img_tag!=DFTAG_NULL)
+ for (j = i+1; j < curr_image; j++)
+ {
+ /* if their refs are different, they're not duplicate, skip */
+ if(img_info[i].img_ref == img_info[j].img_ref)
if(img_info[j].img_tag!=DFTAG_NULL)
- {
+ {
/* If the element is special, get its type, to allow
linked block or chunked images to go into the if
statement below in order for the duplicate image be
eliminated - bug #814, BMR Feb, 2005 */
intn special_type = GRIisspecial_type(file_id,img_info[i].img_tag,img_info[i].img_ref);
- if (((img_info[i].offset!= INVALID_OFFSET && img_info[i]
-.offset!=0)
+ if (((img_info[i].offset!= INVALID_OFFSET && img_info[i].offset!=0)
&& img_info[i].offset == img_info[j].offset) ||
(img_info[i].offset==0
&& (special_type == SPECIAL_LINKED ||
special_type == SPECIAL_CHUNKED)))
- {
- /* eliminate the oldest tag from the match */
- switch(img_info[i].img_tag) {
- case DFTAG_RI:
- case DFTAG_CI: /* Newer style raster image, found in RIG & Vgroup */
- if(img_info[j].grp_tag==DFTAG_RIG)
- {
- img_info[j].orig_tag = img_info[j].img_tag;
+ {
+ /* eliminate the oldest tag from the match */
+ switch(img_info[i].img_tag)
+ {
+ case DFTAG_RI:
+ case DFTAG_CI: /* Newer style raster image, found in RIG & Vgroup */
+ if(img_info[j].grp_tag==DFTAG_RIG)
+ {
+ img_info[j].img_tag=DFTAG_NULL;
+ if(img_info[i].grp_tag==DFTAG_VG)
+ img_info[i].aux_ref=img_info[j].grp_ref;
+ } /* end if */
+ else
+ {
+ if(img_info[i].grp_tag==DFTAG_VG)
img_info[j].img_tag=DFTAG_NULL;
- if(img_info[i].grp_tag==DFTAG_VG)
- img_info[i].aux_ref=img_info[j].grp_ref;
- } /* end if */
- else
- if(img_info[i].grp_tag==DFTAG_VG)
- {
- img_info[j].orig_tag = img_info[j].img_tag;
- img_info[j].img_tag=DFTAG_NULL;
- }
- else
- {
- img_info[j].orig_tag = img_info[j].img_tag;
- img_info[j].img_tag=DFTAG_NULL;
- if(img_info[i].grp_tag==DFTAG_RIG)
- img_info[j].aux_ref=img_info[i].grp_ref;
- } /* end else */
- break;
-
- case DFTAG_RI8:
- case DFTAG_CI8:
- case DFTAG_II8: /* Eldest style raster image, no grouping */
- if(img_info[j].img_tag!=DFTAG_RI8 && img_info[j].img_tag!=DFTAG_CI8 && img_info[j].img_tag!=DFTAG_II8)
- img_info[i].img_tag=DFTAG_NULL;
else
+ {
img_info[j].img_tag=DFTAG_NULL;
- break;
-
- default: /* an image which was eliminated from the list of images */
- break;
- } /* end switch */
- nimages--; /* if duplicate found, decrement the number of images */
- } /* end if */
- } /* end if */
- } /* end for */
- } /* end for */
+ if(img_info[i].grp_tag==DFTAG_RIG)
+ img_info[j].aux_ref=img_info[i].grp_ref;
+ } /* end else */
+ } /* end else */
+ break;
+
+ case DFTAG_RI8:
+ case DFTAG_CI8:
+ case DFTAG_II8: /* Eldest style raster image, no grouping */
+ if(img_info[j].img_tag!=DFTAG_RI8 && img_info[j].img_tag!=DFTAG_CI8 && img_info[j].img_tag!=DFTAG_II8)
+ img_info[i].img_tag=DFTAG_NULL;
+ else
+ img_info[j].img_tag=DFTAG_NULL;
+ break;
+
+ default:
+ /* an image which was eliminated from the list of images */
+ break;
+ } /* end switch */
+ } /* end if */
+ } /* end if */
+ } /* end for */
+ } /* end for go through the images looking for duplicates */
/* Ok, now sort through the file for information about each image found */
for(i=0; i<curr_image; i++)
- {
- if(img_info[i].img_tag!=DFTAG_NULL)
+ {
+ if(img_info[i].img_tag!=DFTAG_NULL)
+ {
+ switch(img_info[i].grp_tag)
{
- switch(img_info[i].grp_tag) {
- case DFTAG_VG: /* New style raster image, found in a Vgroup */
+ case DFTAG_VG: /* New style raster image, found in a Vgroup */
+ {
+ ri_info_t *new_image; /* ptr to the image to read in */
+ int32 img_key; /* Vgroup key of an image */
+ int32 img_tag,img_ref; /* image tag/ref in the Vgroup */
+ char textbuf[VGNAMELENMAX + 1]; /* buffer to store the name in */
+ uint8 ntstring[4]; /* buffer to store NT info */
+ uint8 GRtbuf[64]; /* local buffer for reading RIG info */
+
+ if((img_key=Vattach(file_id,(int32)img_info[i].grp_ref,"r"))!=FAIL)
+ {
+ uint16 name_len;
+ if((new_image=(ri_info_t *)HDmalloc(sizeof(ri_info_t)))==NULL)
{
- ri_info_t *new_image; /* ptr to the image to read in */
- int32 img_key; /* Vgroup key of an image */
- int32 img_tag,img_ref; /* image tag/ref in the Vgroup */
- char textbuf[VGNAMELENMAX + 1]; /* buffer to store the name in */
- uint8 ntstring[4]; /* buffer to store NT info */
- uint8 GRtbuf[64]; /* local buffer for reading RIG info */
-
- if((img_key=Vattach(file_id,(int32)img_info[i].grp_ref,"r"))!=FAIL)
- {
- uint16 name_len;
- if((new_image=(ri_info_t *)HDmalloc(sizeof(ri_info_t)))==NULL)
- {
- HDfree(img_info); /* free offsets */
- Hclose(file_id);
- HGOTO_ERROR(DFE_NOSPACE,FAIL);
- } /* end if */
+ HDfree(img_info); /* free offsets */
+ Hclose(file_id);
+ HGOTO_ERROR(DFE_NOSPACE,FAIL);
+ } /* end if */
- /* Initialize all the fields in the image structure to zeros */
- HDmemset(new_image,0,sizeof(ri_info_t));
+ /* Initialize all the fields in the image structure to zeros */
+ HDmemset(new_image,0,sizeof(ri_info_t));
- /* Get the name of the image */
- if(Vgetnamelen(img_key,&name_len)==FAIL)
- name_len = 20; /* for "Raster Image #%d" */
- if((new_image->name=(char *)HDmalloc(name_len+1))==NULL)
- HGOTO_ERROR(DFE_NOSPACE,FAIL);
- if(Vgetname(img_key,new_image->name)==FAIL)
- sprintf(new_image->name,"Raster Image #%d",(int)i);
-
- /* Initialize the local attribute tree */
- new_image->lattr_count = 0;
- new_image->lattree = tbbtdmake(rigcompare, sizeof(int32), TBBT_FAST_INT32_COMPARE);
- if (new_image->lattree == NULL)
- HGOTO_ERROR(DFE_NOSPACE, FAIL);
- new_image->ri_ref=img_info[i].grp_ref;
- if(img_info[i].aux_ref!=0)
- new_image->rig_ref=img_info[i].aux_ref;
+ /* Get the name of the image */
+ if(Vgetnamelen(img_key,&name_len)==FAIL)
+ name_len = 20; /* for "Raster Image #%d" */
+ if((new_image->name=(char *)HDmalloc(name_len+1))==NULL)
+ HGOTO_ERROR(DFE_NOSPACE,FAIL);
+ if(Vgetname(img_key,new_image->name)==FAIL)
+ sprintf(new_image->name,"Raster Image #%d",(int)i);
+
+ /* Initialize the local attribute tree */
+ new_image->lattr_count = 0;
+ new_image->lattree = tbbtdmake(rigcompare, sizeof(int32), TBBT_FAST_INT32_COMPARE);
+ if (new_image->lattree == NULL)
+ HGOTO_ERROR(DFE_NOSPACE, FAIL);
+ new_image->ri_ref=img_info[i].grp_ref;
+ if(img_info[i].aux_ref!=0)
+ new_image->rig_ref=img_info[i].aux_ref;
+ else
+ new_image->rig_ref=DFREF_WILDCARD;
+
+ for(j=0; j<Vntagrefs(img_key); j++)
+ {
+ if(Vgettagref(img_key,j,&img_tag,&img_ref)==FAIL)
+ continue;
+
+ /* parse this tag/ref pair */
+ switch(img_tag)
+ {
+ case DFTAG_RI: /* Regular image data */
+ new_image->img_tag=(uint16)img_tag;
+ new_image->img_ref=(uint16)img_ref;
+ if(SPECIALTAG(new_image->img_tag)==TRUE)
+ {
+ new_image->use_buf_drvr=1;
+ }
+ break;
+
+ case DFTAG_CI: /* Compressed image data */
+ new_image->img_tag=(uint16)img_tag;
+ new_image->img_ref=(uint16)img_ref;
+ new_image->use_buf_drvr=1;
+ new_image->use_cr_drvr=1;
+ break;
+
+ case DFTAG_LUT: /* Palette */
+ new_image->lut_tag=(uint16)img_tag;
+ new_image->lut_ref=(uint16)img_ref;
+
+ /* Fill in some default palette dimension info, in case there isn't a DFTAG_LD for this palette */
+ if(new_image->lut_dim.dim_ref==0)
+ Init_diminfo(&(new_image->lut_dim));
+ break;
+
+ case DFTAG_LD: /* Palette dimensions */
+{
+ uint8 *p = GRtbuf;
+ if (Hgetelement(file_id, (uint16)img_tag, (uint16)img_ref, GRtbuf) != FAIL)
+ Decode_diminfo(p, &(new_image->lut_dim));
else
- new_image->rig_ref=DFREF_WILDCARD;
+ HGOTO_ERROR(DFE_READERROR, FAIL);
- for(j=0; j<Vntagrefs(img_key); j++)
- {
- if(Vgettagref(img_key,j,&img_tag,&img_ref)==FAIL)
- continue;
-
- /* parse this tag/ref pair */
- switch(img_tag) {
- case DFTAG_RI: /* Regular image data */
- new_image->img_tag=(uint16)img_tag;
- new_image->img_ref=(uint16)img_ref;
- /* fprintf(stderr, "SPECIALTAG(%d/%d) = %d\n", new_image->img_tag, new_image->img_ref, SPECIALTAG(new_image->img_tag));
- */
- if(SPECIALTAG(new_image->img_tag)==TRUE) {
- new_image->use_buf_drvr=1;
- } /* end if */
- break;
-
- case DFTAG_CI: /* Compressed image data */
- new_image->img_tag=(uint16)img_tag;
- new_image->img_ref=(uint16)img_ref;
- new_image->use_buf_drvr=1;
- new_image->use_cr_drvr=1;
- break;
-
- case DFTAG_LUT: /* Palette */
- new_image->lut_tag=(uint16)img_tag;
- new_image->lut_ref=(uint16)img_ref;
-
- /* Fill in some default palette dimension info, in case there isn't a DFTAG_LD for this palette */
- if(new_image->lut_dim.dim_ref==0)
- {
- new_image->lut_dim.dim_ref = DFREF_WILDCARD;
- new_image->lut_dim.xdim=256;
- new_image->lut_dim.ydim=1;
- new_image->lut_dim.ncomps=3;
- new_image->lut_dim.nt=DFNT_UINT8;
- new_image->lut_dim.file_nt_subclass=DFNTF_HDFDEFAULT;
- new_image->lut_dim.il=MFGR_INTERLACE_PIXEL;
- new_image->lut_dim.nt_tag=DFTAG_NULL;
- new_image->lut_dim.nt_ref=DFREF_WILDCARD;
- new_image->lut_dim.comp_tag=DFTAG_NULL;
- new_image->lut_dim.comp_ref=DFREF_WILDCARD;
- } /* end if */
- break;
-
- case DFTAG_LD: /* Palette dimensions */
- if (Hgetelement(file_id, (uint16)img_tag, (uint16)img_ref, GRtbuf) != FAIL)
- {
- int16 int16var;
- uint8 *p;
-
- p = GRtbuf;
- INT32DECODE(p, new_image->lut_dim.xdim);
- INT32DECODE(p, new_image->lut_dim.ydim);
- UINT16DECODE(p, new_image->lut_dim.nt_tag);
- UINT16DECODE(p, new_image->lut_dim.nt_ref);
- INT16DECODE(p, int16var);
- new_image->lut_dim.ncomps=(int32)int16var;
- INT16DECODE(p, new_image->lut_dim.il);
- UINT16DECODE(p, new_image->lut_dim.comp_tag);
- UINT16DECODE(p, new_image->lut_dim.comp_ref);
- }
- else
- HGOTO_ERROR(DFE_READERROR, FAIL);
-
- /* read NT */
- if (Hgetelement(file_id, new_image->lut_dim.nt_tag, new_image->lut_dim.nt_ref, ntstring) == FAIL)
- HGOTO_ERROR(DFE_READERROR, FAIL);
-
- /* check for any valid NT */
- if (ntstring[1] == DFNT_NONE)
- break;
-
- /* set NT info */
- new_image->lut_dim.dim_ref = (uint16)img_ref;
- new_image->lut_dim.nt = (int32)ntstring[1];
- new_image->lut_dim.file_nt_subclass = (int32)ntstring[3];
- if ((new_image->lut_dim.file_nt_subclass != DFNTF_HDFDEFAULT)
- && (new_image->lut_dim.file_nt_subclass!= DFNTF_PC)
- && (new_image->lut_dim.file_nt_subclass!= DFKgetPNSC(new_image->lut_dim.nt, DF_MT)))
- break; /* unknown subclass */
- if (new_image->lut_dim.file_nt_subclass!= DFNTF_HDFDEFAULT)
- { /* if native or little endian */
- if (new_image->lut_dim.file_nt_subclass!= DFNTF_PC) /* native */
- new_image->lut_dim.nt |= DFNT_NATIVE;
- else /* little endian */
- new_image->lut_dim.nt |= DFNT_LITEND;
- } /* end if */
- break;
-
- case DFTAG_ID: /* Image description info */
- if (Hgetelement(file_id, (uint16)img_tag, (uint16)img_ref, GRtbuf) != FAIL)
- {
- int16 int16var;
- uint8 *p;
-
- p = GRtbuf;
- INT32DECODE(p, new_image->img_dim.xdim);
- INT32DECODE(p, new_image->img_dim.ydim);
- UINT16DECODE(p, new_image->img_dim.nt_tag);
- UINT16DECODE(p, new_image->img_dim.nt_ref);
- INT16DECODE(p, int16var);
- new_image->img_dim.ncomps=(int32)int16var;
- INT16DECODE(p, new_image->img_dim.il);
- UINT16DECODE(p, new_image->img_dim.comp_tag);
- UINT16DECODE(p, new_image->img_dim.comp_ref);
- }
- else
- HGOTO_ERROR(DFE_READERROR, FAIL);
-
- /* read NT */
- if (Hgetelement(file_id, new_image->img_dim.nt_tag, new_image->img_dim.nt_ref, ntstring) == FAIL)
- HGOTO_ERROR(DFE_READERROR, FAIL);
-
- /* check for any valid NT */
- if (ntstring[1] == DFNT_NONE)
- break;
-
- /* set NT info */
- new_image->img_dim.dim_ref=(uint16)img_ref;
- new_image->img_dim.nt = (int32)ntstring[1];
- new_image->img_dim.file_nt_subclass = (int32)ntstring[3];
- if ((new_image->img_dim.file_nt_subclass != DFNTF_HDFDEFAULT)
- && (new_image->img_dim.file_nt_subclass!= DFNTF_PC)
- && (new_image->img_dim.file_nt_subclass!= DFKgetPNSC(new_image->img_dim.nt, DF_MT)))
- break; /* unknown subclass */
- if (new_image->img_dim.file_nt_subclass!= DFNTF_HDFDEFAULT)
- { /* if native or little endian */
- if (new_image->img_dim.file_nt_subclass!= DFNTF_PC) /* native */
- new_image->img_dim.nt |= DFNT_NATIVE;
- else /* little endian */
- new_image->img_dim.nt |= DFNT_LITEND;
- } /* end if */
- break;
-
- case DFTAG_VH: /* Attribute information */
- {
- at_info_t *new_attr; /* attribute to add to the set of local attributes */
- int32 at_key; /* VData key for the attribute */
-
- if((new_attr=(at_info_t *)HDmalloc(sizeof(at_info_t)))==NULL)
- HGOTO_ERROR(DFE_NOSPACE,FAIL);
- new_attr->ref=(uint16)img_ref;
- new_attr->index=new_image->lattr_count;
- new_attr->data_modified=FALSE;
- new_attr->new_at=FALSE;
- new_attr->data=NULL;
- if((at_key=VSattach(file_id,(int32)img_ref,"r"))!=FAIL)
- {
- char *fname;
-
- /* Make certain the attribute only has one field */
- if(VFnfields(at_key)!=1)
- {
- VSdetach(at_key);
- HDfree(new_attr);
- break;
- } /* end if */
- new_attr->nt=VFfieldtype(at_key,0);
- new_attr->len=VFfieldorder(at_key,0);
- if(new_attr->len==1)
- new_attr->len=VSelts(at_key);
-
- /* Get the name of the attribute */
- if((fname=VFfieldname(at_key,0))==NULL)
- {
- sprintf(textbuf,"Attribute #%d",(int)new_attr->index);
- if((new_attr->name=(char *)HDmalloc(HDstrlen(textbuf)+1))==NULL)
- {
- VSdetach(at_key);
- HDfree(new_attr);
- HGOTO_ERROR(DFE_NOSPACE,FAIL);
- } /* end if */
- HDstrcpy(new_attr->name,textbuf);
- } /* end if */
- else
- {
- if((new_attr->name=(char *)HDmalloc(HDstrlen(fname)+1))==NULL)
- {
- VSdetach(at_key);
- HDfree(new_attr);
- HGOTO_ERROR(DFE_NOSPACE,FAIL);
- } /* end if */
- HDstrcpy(new_attr->name,fname);
- } /* end else */
-
- tbbtdins(new_image->lattree, new_attr, NULL); /* insert the attr instance in B-tree */
+ /* read NT */
+ if (Hgetelement(file_id, new_image->lut_dim.nt_tag, new_image->lut_dim.nt_ref, ntstring) == FAIL)
+ HGOTO_ERROR(DFE_READERROR, FAIL);
- VSdetach(at_key);
- } /* end if */
+ /* check for any valid NT */
+ if (ntstring[1] == DFNT_NONE)
+ break;
- new_image->lattr_count++;
- } /* end case */
- break;
+ /* set NT info */
+ new_image->lut_dim.dim_ref = (uint16)img_ref;
+ new_image->lut_dim.nt = (int32)ntstring[1];
+ new_image->lut_dim.file_nt_subclass = (int32)ntstring[3];
+ if ((new_image->lut_dim.file_nt_subclass != DFNTF_HDFDEFAULT)
+ && (new_image->lut_dim.file_nt_subclass!= DFNTF_PC)
+ && (new_image->lut_dim.file_nt_subclass!= DFKgetPNSC(new_image->lut_dim.nt, DF_MT)))
+ break; /* unknown subclass */
+ if (new_image->lut_dim.file_nt_subclass!= DFNTF_HDFDEFAULT)
+ { /* if native or little endian */
+ if (new_image->lut_dim.file_nt_subclass!= DFNTF_PC) /* native */
+ new_image->lut_dim.nt |= DFNT_NATIVE;
+ else /* little endian */
+ new_image->lut_dim.nt |= DFNT_LITEND;
+ } /* end if */
+}
+ break;
- default: /* Unknown tag */
- break;
- } /* end switch */
- } /* end for */
- new_image->index=gr_ptr->gr_count;
- new_image->gr_ptr=gr_ptr; /* point up the tree */
- tbbtdins(gr_ptr->grtree, new_image, NULL); /* insert the new image into B-tree */
- gr_ptr->gr_count++;
- Vdetach(img_key);
- } /* end if */
- } /* end case */
- break;
+ case DFTAG_ID: /* Image description info */
+ {
+ uint8 *p = GRtbuf;
+ at_info_t *new_attr; /* attr to add to the local attr set */
+ if (Hgetelement(file_id, (uint16)img_tag, (uint16)img_ref, GRtbuf) != FAIL)
+ Decode_diminfo(p, &(new_image->img_dim));
+ else
+ HGOTO_ERROR(DFE_READERROR, FAIL);
- case DFTAG_RIG: /* Older style raster image, found in RIG */
- {
- int32 GroupID;
- uint16 elt_tag, elt_ref;
- ri_info_t *new_image; /* ptr to the image to read in */
- char textbuf[VGNAMELENMAX + 1]; /* buffer to store the name in */
- uint8 ntstring[4]; /* buffer to store NT info */
- uint8 GRtbuf[64]; /* local buffer for reading RIG info */
-
- /* read RIG into memory */
- if ((GroupID = DFdiread(file_id, DFTAG_RIG, img_info[i].grp_ref)) == FAIL)
- HGOTO_ERROR(DFE_READERROR, FAIL);
-
- if((new_image=(ri_info_t *)HDmalloc(sizeof(ri_info_t)))==NULL)
+ /* read NT */
+ if (Hgetelement(file_id, new_image->img_dim.nt_tag, new_image->img_dim.nt_ref, ntstring) == FAIL)
+ HGOTO_ERROR(DFE_READERROR, FAIL);
+
+ /* check for any valid NT */
+ if (ntstring[1] == DFNT_NONE)
+ break;
+
+ /* set NT info */
+ new_image->img_dim.dim_ref=(uint16)img_ref;
+ new_image->img_dim.nt = (int32)ntstring[1];
+ new_image->img_dim.file_nt_subclass = (int32)ntstring[3];
+ if ((new_image->img_dim.file_nt_subclass != DFNTF_HDFDEFAULT)
+ && (new_image->img_dim.file_nt_subclass!= DFNTF_PC)
+ && (new_image->img_dim.file_nt_subclass!= DFKgetPNSC(new_image->img_dim.nt, DF_MT)))
+ break; /* unknown subclass */
+ if (new_image->img_dim.file_nt_subclass!= DFNTF_HDFDEFAULT)
+ { /* if native or little endian */
+ if (new_image->img_dim.file_nt_subclass!= DFNTF_PC) /* native */
+ new_image->img_dim.nt |= DFNT_NATIVE;
+ else /* little endian */
+ new_image->img_dim.nt |= DFNT_LITEND;
+ } /* end if */
+ } /* end case DFTAG_ID */
+
+ case DFTAG_VH: /* Attribute information */
{
- HDfree(img_info); /* free offsets */
- Hclose(file_id);
- HGOTO_ERROR(DFE_NOSPACE,FAIL);
- } /* end if */
+ at_info_t *new_attr; /* attr to add to the local attr set */
+ int32 at_key; /* VData key for the attribute */
- /* Initialize all the fields in the image structure to zeros */
- HDmemset(new_image,0,sizeof(ri_info_t));
+ if((new_attr=(at_info_t *)HDmalloc(sizeof(at_info_t)))==NULL)
+ HGOTO_ERROR(DFE_NOSPACE,FAIL);
+ new_attr->ref=(uint16)img_ref;
+ new_attr->index=new_image->lattr_count;
+ new_attr->data_modified=FALSE;
+ new_attr->new_at=FALSE;
+ new_attr->data=NULL;
+ if((at_key=VSattach(file_id,(int32)img_ref,"r"))!=FAIL)
+ {
+ char *fname;
+
+ /* Make certain the attribute only has one field */
+ if(VFnfields(at_key)!=1)
+ {
+ VSdetach(at_key);
+ HDfree(new_attr);
+ break;
+ } /* end if */
+ new_attr->nt=VFfieldtype(at_key,0);
+ new_attr->len=VFfieldorder(at_key,0);
+ if(new_attr->len==1)
+ new_attr->len=VSelts(at_key);
+
+ /* Get the name of the attribute */
+ if((fname=VFfieldname(at_key,0))==NULL)
+ {
+ sprintf(textbuf,"Attribute #%d",(int)new_attr->index);
+ if((new_attr->name=(char *)HDmalloc(HDstrlen(textbuf)+1))==NULL)
+ {
+ VSdetach(at_key);
+ HDfree(new_attr);
+ HGOTO_ERROR(DFE_NOSPACE,FAIL);
+ } /* end if */
+ HDstrcpy(new_attr->name,textbuf);
+ } /* end if */
+ else
+ {
+ if((new_attr->name=(char *)HDmalloc(HDstrlen(fname)+1))==NULL)
+ {
+ VSdetach(at_key);
+ HDfree(new_attr);
+ HGOTO_ERROR(DFE_NOSPACE,FAIL);
+ } /* end if */
+ HDstrcpy(new_attr->name,fname);
+ } /* end else */
+
+ tbbtdins(new_image->lattree, new_attr, NULL); /* insert the attr instance in B-tree */
- /* Get the name of the image */
- sprintf(textbuf,"Raster Image #%d",(int)i);
- if((new_image->name=(char *)HDmalloc(HDstrlen(textbuf)+1))==NULL)
- HGOTO_ERROR(DFE_NOSPACE,FAIL);
- HDstrcpy(new_image->name,textbuf);
- new_image->name_generated = TRUE;
-
- /* Initialize the local attribute tree */
- new_image->lattree = tbbtdmake(rigcompare, sizeof(int32), TBBT_FAST_INT32_COMPARE);
- if (new_image->lattree == NULL)
- HGOTO_ERROR(DFE_NOSPACE, FAIL);
- new_image->ri_ref=DFREF_WILDCARD;
- new_image->rig_ref=img_info[i].grp_ref;
-
- while (DFdiget(GroupID, &elt_tag, &elt_ref)!=FAIL)
- { /* get next tag/ref */
- switch (elt_tag)
- { /* process tag/ref */
- case DFTAG_RI: /* regular image data */
- new_image->img_tag=elt_tag;
- new_image->img_ref=elt_ref;
- if(SPECIALTAG(new_image->img_tag)==TRUE) {
- new_image->use_buf_drvr=1;
- } /* end if */
- break;
-
- case DFTAG_CI: /* compressed image data */
- new_image->img_tag=elt_tag;
- new_image->img_ref=elt_ref;
- new_image->use_buf_drvr=1;
- new_image->use_cr_drvr=1;
- break;
-
- case DFTAG_LUT: /* Palette */
- new_image->lut_tag=elt_tag;
- new_image->lut_ref=elt_ref;
-
- /* Fill in some default palette dimension info, in case there isn't a DFTAG_LD for this palette */
- if(new_image->lut_dim.dim_ref==0)
- {
- new_image->lut_dim.dim_ref = DFREF_WILDCARD;
- new_image->lut_dim.xdim=256;
- new_image->lut_dim.ydim=1;
- new_image->lut_dim.ncomps=3;
- new_image->lut_dim.nt=DFNT_UINT8;
- new_image->lut_dim.file_nt_subclass=DFNTF_HDFDEFAULT;
- new_image->lut_dim.il=MFGR_INTERLACE_PIXEL;
- new_image->lut_dim.nt_tag=DFTAG_NULL;
- new_image->lut_dim.nt_ref=DFREF_WILDCARD;
- new_image->lut_dim.comp_tag=DFTAG_NULL;
- new_image->lut_dim.comp_ref=DFREF_WILDCARD;
- } /* end if */
- break;
-
- case DFTAG_LD: /* Palette dimensions */
- if (Hgetelement(file_id, elt_tag, elt_ref, GRtbuf) != FAIL)
- {
- int16 int16var;
- uint8 *p;
-
- p = GRtbuf;
- INT32DECODE(p, new_image->lut_dim.xdim);
- INT32DECODE(p, new_image->lut_dim.ydim);
- UINT16DECODE(p, new_image->lut_dim.nt_tag);
- UINT16DECODE(p, new_image->lut_dim.nt_ref);
- INT16DECODE(p, int16var);
- new_image->lut_dim.ncomps=(int32)int16var;
- INT16DECODE(p, new_image->lut_dim.il);
- UINT16DECODE(p, new_image->lut_dim.comp_tag);
- UINT16DECODE(p, new_image->lut_dim.comp_ref);
- }
- else
- {
- DFdifree( GroupID );
- HGOTO_ERROR(DFE_READERROR, FAIL);
- }
-
- /* read NT */
- if (Hgetelement(file_id, new_image->lut_dim.nt_tag, new_image->lut_dim.nt_ref, ntstring) == FAIL)
- {
- DFdifree( GroupID );
- HGOTO_ERROR(DFE_READERROR, FAIL);
- }
-
- /* check for any valid NT */
- if (ntstring[1] == DFNT_NONE)
- break;
-
- /* set NT info */
- new_image->lut_dim.dim_ref = elt_ref;
- new_image->lut_dim.nt = (int32)ntstring[1];
- new_image->lut_dim.file_nt_subclass = (int32)ntstring[3];
- if ((new_image->lut_dim.file_nt_subclass != DFNTF_HDFDEFAULT)
- && (new_image->lut_dim.file_nt_subclass!= DFNTF_PC)
- && (new_image->lut_dim.file_nt_subclass!= DFKgetPNSC(new_image->lut_dim.nt, DF_MT)))
- break; /* unknown subclass */
- if (new_image->lut_dim.file_nt_subclass!= DFNTF_HDFDEFAULT)
- { /* if native or little endian */
- if (new_image->lut_dim.file_nt_subclass!= DFNTF_PC) /* native */
- new_image->lut_dim.nt |= DFNT_NATIVE;
- else /* little endian */
- new_image->lut_dim.nt |= DFNT_LITEND;
- } /* end if */
- break;
-
- case DFTAG_ID: /* Image description info */
- if (Hgetelement(file_id, elt_tag, elt_ref, GRtbuf) != FAIL)
- {
- int16 int16var;
- uint8 *p;
-
- p = GRtbuf;
- INT32DECODE(p, new_image->img_dim.xdim);
- INT32DECODE(p, new_image->img_dim.ydim);
- UINT16DECODE(p, new_image->img_dim.nt_tag);
- UINT16DECODE(p, new_image->img_dim.nt_ref);
- INT16DECODE(p, int16var);
- new_image->img_dim.ncomps=(int32)int16var;
- INT16DECODE(p, new_image->img_dim.il);
- UINT16DECODE(p, new_image->img_dim.comp_tag);
- UINT16DECODE(p, new_image->img_dim.comp_ref);
- }
- else
- {
- DFdifree( GroupID );
- HGOTO_ERROR(DFE_GETELEM, FAIL);
- }
-
- /* read NT */
- if (Hgetelement(file_id, new_image->img_dim.nt_tag, new_image->img_dim.nt_ref, ntstring) == FAIL)
- {
- DFdifree( GroupID );
- HGOTO_ERROR(DFE_GETELEM, FAIL);
- }
-
- /* check for any valid NT */
- if (ntstring[1] == DFNT_NONE)
- break;
-
- /* set NT info */
- new_image->img_dim.dim_ref=elt_ref;
- new_image->img_dim.nt = (int32)ntstring[1];
- new_image->img_dim.file_nt_subclass = (int32)ntstring[3];
- if ((new_image->img_dim.file_nt_subclass != DFNTF_HDFDEFAULT)
- && (new_image->img_dim.file_nt_subclass!= DFNTF_PC)
- && (new_image->img_dim.file_nt_subclass!= DFKgetPNSC(new_image->img_dim.nt, DF_MT)))
- break; /* unknown subclass */
- if (new_image->img_dim.file_nt_subclass!= DFNTF_HDFDEFAULT)
- { /* if native or little endian */
- if (new_image->img_dim.file_nt_subclass!= DFNTF_PC) /* native */
- new_image->img_dim.nt |= DFNT_NATIVE;
- else /* little endian */
- new_image->img_dim.nt |= DFNT_LITEND;
- } /* end if */
- break;
+ VSdetach(at_key);
+ } /* end if */
- default: /* ignore unknown tags */
- break;
- } /* end switch */
- } /* end while */
- new_image->index=gr_ptr->gr_count;
- new_image->gr_ptr=gr_ptr; /* point up the tree */
- tbbtdins(gr_ptr->grtree, new_image, NULL); /* insert the new image into B-tree */
- gr_ptr->gr_count++;
- } /* end case */
- break;
-
- case DFTAG_NULL: /* Eldest style raster image, no grouping */
- {
- ri_info_t *new_image; /* ptr to the image to read in */
- char textbuf[VGNAMELENMAX + 1]; /* buffer to store the name in */
- uint8 GRtbuf[64]; /* local buffer for reading RIG info */
+ new_image->lattr_count++;
- if((new_image=(ri_info_t *)HDmalloc(sizeof(ri_info_t)))==NULL)
- {
- HDfree(img_info); /* free offsets */
- Hclose(file_id);
- HGOTO_ERROR(DFE_NOSPACE,FAIL);
- } /* end if */
+ break;
+ } /* end case DFTAG_VH */
- /* Initialize all the fields in the image structure to zeros */
- HDmemset(new_image,0,sizeof(ri_info_t));
+ default: /* Unknown tag */
+ break;
+ } /* end switch */
+ } /* end for */
- /* Get the name of the image */
- sprintf(textbuf,"Raster Image #%d",(int)i);
- if((new_image->name=(char *)HDmalloc(HDstrlen(textbuf)+1))==NULL)
- HGOTO_ERROR(DFE_NOSPACE,FAIL);
- HDstrcpy(new_image->name,textbuf);
- new_image->name_generated = TRUE;
-
- /* Initialize the local attribute tree */
- new_image->lattree = tbbtdmake(rigcompare, sizeof(int32), TBBT_FAST_INT32_COMPARE);
- if (new_image->lattree == NULL)
- HGOTO_ERROR(DFE_NOSPACE, FAIL);
- new_image->ri_ref=DFREF_WILDCARD;
- new_image->rig_ref=DFREF_WILDCARD;
+ new_image->index=gr_ptr->gr_count;
+ new_image->gr_ptr=gr_ptr; /* point up the tree */
+ tbbtdins(gr_ptr->grtree, new_image, NULL); /* insert the new image into B-tree */
+ gr_ptr->gr_count++;
+ Vdetach(img_key);
+ } /* end if */
+ } /* end case DFTAG_VG */
+ break;
+
+ case DFTAG_RIG: /* Older style raster image, found in RIG */
+ {
+ int32 GroupID;
+ uint16 elt_tag, elt_ref;
+ ri_info_t *new_image; /* ptr to the image to read in */
+ char textbuf[VGNAMELENMAX + 1]; /* buffer to store the name in */
+ uint8 ntstring[4]; /* buffer to store NT info */
+ uint8 GRtbuf[64]; /* local buffer for reading RIG info */
+
+ /* read RIG into memory */
+ if ((GroupID = DFdiread(file_id, DFTAG_RIG, img_info[i].grp_ref)) == FAIL)
+ HGOTO_ERROR(DFE_READERROR, FAIL);
+
+ if((new_image=(ri_info_t *)HDmalloc(sizeof(ri_info_t)))==NULL)
+ {
+ HDfree(img_info); /* free offsets */
+ Hclose(file_id);
+ HGOTO_ERROR(DFE_NOSPACE,FAIL);
+ } /* end if */
- /* Get tag/ref for image */
- new_image->img_tag=img_info[i].img_tag;
- new_image->img_ref=img_info[i].img_ref;
+ /* Initialize all the fields in the image structure to zeros */
+ HDmemset(new_image,0,sizeof(ri_info_t));
- /* Get dimension information */
- if (Hgetelement(file_id, DFTAG_ID8, new_image->img_ref, GRtbuf) != FAIL)
+ /* Get the name of the image */
+ sprintf(textbuf,"Raster Image #%d",(int)i);
+ if((new_image->name=(char *)HDmalloc(HDstrlen(textbuf)+1))==NULL)
+ HGOTO_ERROR(DFE_NOSPACE,FAIL);
+ HDstrcpy(new_image->name,textbuf);
+ new_image->name_generated = TRUE;
+
+ /* Initialize the local attribute tree */
+ new_image->lattree = tbbtdmake(rigcompare, sizeof(int32), TBBT_FAST_INT32_COMPARE);
+ if (new_image->lattree == NULL)
+ HGOTO_ERROR(DFE_NOSPACE, FAIL);
+ new_image->ri_ref=DFREF_WILDCARD;
+ new_image->rig_ref=img_info[i].grp_ref;
+
+ while (DFdiget(GroupID, &elt_tag, &elt_ref)!=FAIL)
+ { /* get next tag/ref */
+ switch (elt_tag)
+ { /* process tag/ref */
+ case DFTAG_RI: /* regular image data */
+ new_image->img_tag=elt_tag;
+ new_image->img_ref=elt_ref;
+ if(SPECIALTAG(new_image->img_tag)==TRUE)
{
- uint8 *p;
- uint16 u;
-
- p = GRtbuf;
- UINT16DECODE(p, u);
- new_image->img_dim.xdim=(int32)u;
- UINT16DECODE(p, u);
- new_image->img_dim.ydim=(int32)u;
- } /* end if */
- else
- HGOTO_ERROR(DFE_GETELEM, FAIL);
-
- /* only 8-bit images, so fill in rest of dim info */
- new_image->img_dim.dim_ref=DFREF_WILDCARD;
- new_image->img_dim.ncomps=1;
- new_image->img_dim.nt=DFNT_UINT8;
- new_image->img_dim.file_nt_subclass=DFNTF_HDFDEFAULT;
- new_image->img_dim.il=MFGR_INTERLACE_PIXEL;
- new_image->img_dim.nt_tag=DFTAG_NULL;
- new_image->img_dim.nt_ref=DFREF_WILDCARD;
- new_image->img_dim.comp_tag=DFTAG_NULL;
- new_image->img_dim.comp_ref=DFREF_WILDCARD;
-
- /* Get palette information */
- if(Hexist(file_id, DFTAG_IP8, new_image->img_ref)==SUCCEED)
+ new_image->use_buf_drvr=1;
+ } /* end if */
+ break;
+
+ case DFTAG_CI: /* compressed image data */
+ new_image->img_tag=elt_tag;
+ new_image->img_ref=elt_ref;
+ new_image->use_buf_drvr=1;
+ new_image->use_cr_drvr=1;
+ break;
+
+ case DFTAG_LUT: /* Palette */
+ new_image->lut_tag=elt_tag;
+ new_image->lut_ref=elt_ref;
+
+ /* Fill in some default palette dimension info, in
+ case there isn't a DFTAG_LD for this palette */
+ if(new_image->lut_dim.dim_ref==0)
{
- new_image->lut_tag=DFTAG_IP8;
- new_image->lut_ref=new_image->img_ref;
-
- /* set palette dimensions too */
- new_image->lut_dim.dim_ref = DFREF_WILDCARD;
- new_image->lut_dim.xdim=256;
- new_image->lut_dim.ydim=1;
- new_image->lut_dim.ncomps=3;
- new_image->lut_dim.nt=DFNT_UINT8;
- new_image->lut_dim.file_nt_subclass=DFNTF_HDFDEFAULT;
- new_image->lut_dim.il=MFGR_INTERLACE_PIXEL;
- new_image->lut_dim.nt_tag=DFTAG_NULL;
- new_image->lut_dim.nt_ref=DFREF_WILDCARD;
- new_image->lut_dim.comp_tag=DFTAG_NULL;
- new_image->lut_dim.comp_ref=DFREF_WILDCARD;
+ Init_diminfo(&(new_image->lut_dim));
} /* end if */
- else
- new_image->lut_tag=new_image->lut_ref=DFREF_WILDCARD;
+ break;
- new_image->index=gr_ptr->gr_count;
- new_image->gr_ptr=gr_ptr; /* point up the tree */
- tbbtdins(gr_ptr->grtree, new_image, NULL); /* insert the new image into B-tree */
- gr_ptr->gr_count++;
- } /* end case */
- break;
+ case DFTAG_LD: /* Palette dimensions */
+{
+ uint8 *p = GRtbuf;
+ if (Hgetelement(file_id, elt_tag, elt_ref, GRtbuf) != FAIL)
+ Decode_diminfo(p, &(new_image->lut_dim));
+ else
+ {
+ DFdifree(GroupID);
+ HGOTO_ERROR(DFE_READERROR, FAIL);
+ }
- default: /* an image which was eliminated from the list of images */
- break;
- } /* end switch */
- } /* end if */
- } /* end for */
+ /* read NT */
+ if (Hgetelement(file_id, new_image->lut_dim.nt_tag, new_image->lut_dim.nt_ref, ntstring) == FAIL)
+ {
+ DFdifree( GroupID );
+ HGOTO_ERROR(DFE_READERROR, FAIL);
+ }
+
+ /* check for any valid NT */
+ if (ntstring[1] == DFNT_NONE)
+ break;
+
+ /* set NT info */
+ new_image->lut_dim.dim_ref = elt_ref;
+ new_image->lut_dim.nt = (int32)ntstring[1];
+ new_image->lut_dim.file_nt_subclass = (int32)ntstring[3];
+ if ((new_image->lut_dim.file_nt_subclass != DFNTF_HDFDEFAULT)
+ && (new_image->lut_dim.file_nt_subclass!= DFNTF_PC)
+ && (new_image->lut_dim.file_nt_subclass!= DFKgetPNSC(new_image->lut_dim.nt, DF_MT)))
+ break; /* unknown subclass */
+ if (new_image->lut_dim.file_nt_subclass!= DFNTF_HDFDEFAULT)
+ { /* if native or little endian */
+ if (new_image->lut_dim.file_nt_subclass!= DFNTF_PC) /* native */
+ new_image->lut_dim.nt |= DFNT_NATIVE;
+ else /* little endian */
+ new_image->lut_dim.nt |= DFNT_LITEND;
+ } /* end if */
+ break;
+}
+ case DFTAG_ID: /* Image description info */
+ {
+ uint8 *p = GRtbuf;
+ if (Hgetelement(file_id, elt_tag, elt_ref, GRtbuf) != FAIL)
+ Decode_diminfo(p, &(new_image->img_dim));
+ else
+ {
+ DFdifree( GroupID );
+ HGOTO_ERROR(DFE_GETELEM, FAIL);
+ }
+
+ /* read NT */
+ if (Hgetelement(file_id, new_image->img_dim.nt_tag, new_image->img_dim.nt_ref, ntstring) == FAIL)
+ {
+ DFdifree( GroupID );
+ HGOTO_ERROR(DFE_GETELEM, FAIL);
+ }
+
+ /* check for any valid NT */
+ if (ntstring[1] == DFNT_NONE)
+ break;
+
+ /* set NT info */
+ new_image->img_dim.dim_ref=elt_ref;
+ new_image->img_dim.nt = (int32)ntstring[1];
+ new_image->img_dim.file_nt_subclass = (int32)ntstring[3];
+ if ((new_image->img_dim.file_nt_subclass != DFNTF_HDFDEFAULT)
+ && (new_image->img_dim.file_nt_subclass!= DFNTF_PC)
+ && (new_image->img_dim.file_nt_subclass!= DFKgetPNSC(new_image->img_dim.nt, DF_MT)))
+ break; /* unknown subclass */
+ if (new_image->img_dim.file_nt_subclass!= DFNTF_HDFDEFAULT)
+ { /* if native or little endian */
+ if (new_image->img_dim.file_nt_subclass!= DFNTF_PC) /* native */
+ new_image->img_dim.nt |= DFNT_NATIVE;
+ else /* little endian */
+ new_image->img_dim.nt |= DFNT_LITEND;
+ } /* end if */
+ break;
+ }
+ default: /* ignore unknown tags */
+ break;
+ } /* end switch */
+ } /* end while */
+ new_image->index=gr_ptr->gr_count;
+ new_image->gr_ptr=gr_ptr; /* point up the tree */
+ tbbtdins(gr_ptr->grtree, new_image, NULL); /* insert the new image into B-tree */
+ gr_ptr->gr_count++;
+ } /* end case DFTAG_RIG */
+ break;
+
+ case DFTAG_NULL: /* Eldest style raster image, no grouping */
+ {
+ ri_info_t *new_image; /* ptr to the image to read in */
+ char textbuf[VGNAMELENMAX + 1]; /* buffer to store the name in */
+ uint8 GRtbuf[64]; /* local buffer for reading RIG info */
+
+ if((new_image=(ri_info_t *)HDmalloc(sizeof(ri_info_t)))==NULL)
+ {
+ HDfree(img_info); /* free offsets */
+ Hclose(file_id);
+ HGOTO_ERROR(DFE_NOSPACE,FAIL);
+ } /* end if */
+
+ /* Initialize all the fields in the image structure to zeros */
+ HDmemset(new_image,0,sizeof(ri_info_t));
+
+ /* Get the name of the image */
+ sprintf(textbuf,"Raster Image #%d",(int)i);
+ if((new_image->name=(char *)HDmalloc(HDstrlen(textbuf)+1))==NULL)
+ HGOTO_ERROR(DFE_NOSPACE,FAIL);
+ HDstrcpy(new_image->name,textbuf);
+ new_image->name_generated = TRUE;
+
+ /* Initialize the local attribute tree */
+ new_image->lattree = tbbtdmake(rigcompare, sizeof(int32), TBBT_FAST_INT32_COMPARE);
+ if (new_image->lattree == NULL)
+ HGOTO_ERROR(DFE_NOSPACE, FAIL);
+ new_image->ri_ref=DFREF_WILDCARD;
+ new_image->rig_ref=DFREF_WILDCARD;
+
+ /* Get tag/ref for image */
+ new_image->img_tag=img_info[i].img_tag;
+ new_image->img_ref=img_info[i].img_ref;
+
+ /* Get dimension information for this 8-bit image */
+
+ /* Initialize dim info to default */
+ Init_diminfo(&(new_image->img_dim));
+
+ /* Reassign valid values */
+ if (Hgetelement(file_id, DFTAG_ID8, new_image->img_ref, GRtbuf) != FAIL)
+ {
+ uint8 *p;
+ uint16 u;
+
+ p = GRtbuf;
+ UINT16DECODE(p, u);
+ new_image->img_dim.xdim=(int32)u;
+ UINT16DECODE(p, u);
+ new_image->img_dim.ydim=(int32)u;
+ new_image->img_dim.ncomps=1;
+ } /* end if */
+ else
+ HGOTO_ERROR(DFE_GETELEM, FAIL);
+
+ /* Get palette information */
+ if(Hexist(file_id, DFTAG_IP8, new_image->img_ref)==SUCCEED)
+ {
+ new_image->lut_tag=DFTAG_IP8;
+ new_image->lut_ref=new_image->img_ref;
+
+ /* set palette dimensions too */
+ Init_diminfo(&(new_image->lut_dim));
+ } /* end if */
+ else
+ new_image->lut_tag=new_image->lut_ref=DFREF_WILDCARD;
+
+ new_image->index=gr_ptr->gr_count;
+ new_image->gr_ptr=gr_ptr; /* point up the tree */
+ tbbtdins(gr_ptr->grtree, new_image, NULL); /* insert the new image into B-tree */
+ gr_ptr->gr_count++;
+ } /* end case DFTAG_NULL */
+ break;
+
+ default: /* an image which was eliminated from the list of images */
+ break;
+ } /* end switch */
+ } /* end if */
+ } /* end for */
- HDfree(img_info); /* free offsets */
+ HDfree(img_info); /* free image info structures */
done:
if(ret_value == FAIL)
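
The hunk above also routes the dimension-record handling through two helpers, Init_diminfo and Decode_diminfo, whose definitions are not included in this part of the patch. Minimal sketches, reconstructed from the inline code they replace (the default-palette block and the INT32DECODE/UINT16DECODE sequence); the dim_info_t type name for the lut_dim/img_dim members is an assumption:

    /* Sketches only -- both helpers are defined elsewhere in mfgr.c and the
     * dim_info_t type name is assumed. */

    /* Fill a dimension record with the defaults the old inline code used for
     * a palette that has no DFTAG_LD record: 256x1, 3 components of uint8,
     * pixel interlace, and wildcard/null tag-ref fields. */
    static void
    Init_diminfo(dim_info_t *dim)
    {
        dim->dim_ref          = DFREF_WILDCARD;
        dim->xdim             = 256;
        dim->ydim             = 1;
        dim->ncomps           = 3;
        dim->nt               = DFNT_UINT8;
        dim->file_nt_subclass = DFNTF_HDFDEFAULT;
        dim->il               = MFGR_INTERLACE_PIXEL;
        dim->nt_tag           = DFTAG_NULL;
        dim->nt_ref           = DFREF_WILDCARD;
        dim->comp_tag         = DFTAG_NULL;
        dim->comp_ref         = DFREF_WILDCARD;
    }

    /* Decode a DFTAG_LD or DFTAG_ID element that has already been read into
     * a buffer, exactly as the removed inline decode sequence did. */
    static void
    Decode_diminfo(uint8 *p, dim_info_t *dim)
    {
        int16 int16var;

        INT32DECODE(p, dim->xdim);
        INT32DECODE(p, dim->ydim);
        UINT16DECODE(p, dim->nt_tag);
        UINT16DECODE(p, dim->nt_ref);
        INT16DECODE(p, int16var);
        dim->ncomps = (int32)int16var;
        INT16DECODE(p, dim->il);
        UINT16DECODE(p, dim->comp_tag);
        UINT16DECODE(p, dim->comp_ref);
    }
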
diff --git a/hdf/src/mfgrf.c b/hdf/src/mfgrf.c
index 6123210..a03fdce 100644
--- a/hdf/src/mfgrf.c
+++ b/hdf/src/mfgrf.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 5700 $";
-#endif
-
-/* $Id: mfgrf.c 5700 2011-10-10 04:32:38Z bmribler $ */
+/* $Id: mfgrf.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*-----------------------------------------------------------------------------
* File: mfsdf.c
diff --git a/hdf/src/mstdio.c b/hdf/src/mstdio.c
index 8306844..5d8ae14 100644
--- a/hdf/src/mstdio.c
+++ b/hdf/src/mstdio.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: mstdio.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: mstdio.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*
FILE
diff --git a/hdf/src/tbbt.c b/hdf/src/tbbt.c
index 091c37f..2cc165f 100644
--- a/hdf/src/tbbt.c
+++ b/hdf/src/tbbt.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6043 $";
-#endif
-
-/* $Id: tbbt.c 6043 2014-01-21 21:09:03Z acheng $ */
+/* $Id: tbbt.c 6357 2016-05-13 05:00:06Z bmribler $ */
/* "tbbt.c" -- Routines for using threaded, balanced, binary trees. */
/* Extended from (added threads to) Knuth 6.2.3, Algorithm A (AVL trees) */
diff --git a/hdf/src/vattr.c b/hdf/src/vattr.c
index d8c8c82..9c0d9b6 100644
--- a/hdf/src/vattr.c
+++ b/hdf/src/vattr.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 5899 $";
-#endif
-
-/* $Id: vattr.c 5899 2013-01-09 21:18:13Z bmribler $ */
+/* $Id: vattr.c 6357 2016-05-13 05:00:06Z bmribler $ */
/**************************************************************
*
diff --git a/hdf/src/vattr.h b/hdf/src/vattr.h
index 22002e9..7e77a11 100644
--- a/hdf/src/vattr.h
+++ b/hdf/src/vattr.h
@@ -11,16 +11,12 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $ "
-#endif
-
/***********************************************************
* vattr.h - vdata/vgroup attribute interface
*
*********************************************************** */
-/* $Id: vattr.h 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: vattr.h 6358 2016-05-13 15:12:20Z bmribler $ */
#ifndef _VATTR_H
#define _VATTR_H
diff --git a/hdf/src/vattrf.c b/hdf/src/vattrf.c
index a9b5add..02795cf 100644
--- a/hdf/src/vattrf.c
+++ b/hdf/src/vattrf.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: vattrf.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: vattrf.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*
*
* vattrf.c
diff --git a/hdf/src/vconv.c b/hdf/src/vconv.c
index a136706..486f6d6 100644
--- a/hdf/src/vconv.c
+++ b/hdf/src/vconv.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 5691 $";
-#endif
-
-/* $Id: vconv.c 5691 2011-09-19 16:02:14Z bmribler $ */
+/* $Id: vconv.c 6357 2016-05-13 05:00:06Z bmribler $ */
/* obsolete code for HDF 3.2. 26/march/92 jason ng */
/* except for the following routines:
diff --git a/hdf/src/vg.c b/hdf/src/vg.c
index bf598d0..6d96c6c 100644
--- a/hdf/src/vg.c
+++ b/hdf/src/vg.c
@@ -11,10 +11,6 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 5697 $";
-#endif
-/* vg.c,v 1.15.4.1 1993/10/26 19:25:07 georgev Exp */
/*
FILE
diff --git a/hdf/src/vgf.c b/hdf/src/vgf.c
index c193781..f961157 100644
--- a/hdf/src/vgf.c
+++ b/hdf/src/vgf.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 5737 $";
-#endif
-
-/* $Id: vgf.c 5737 2012-01-06 04:52:28Z brtnfld $ */
+/* $Id: vgf.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*-----------------------------------------------------------------------------
FUNCTION NAMEING CONVENTION:(For the most part true)
diff --git a/hdf/src/vhi.c b/hdf/src/vhi.c
index 3e3234f..ddc9a45 100644
--- a/hdf/src/vhi.c
+++ b/hdf/src/vhi.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: vhi.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: vhi.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*
* File
* vhi.c
diff --git a/hdf/src/vio.c b/hdf/src/vio.c
index 6758ee7..ad9df9c 100644
--- a/hdf/src/vio.c
+++ b/hdf/src/vio.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6026 $";
-#endif
-
-/* $Id: vio.c 6026 2014-01-16 15:16:16Z bmribler $ */
+/* $Id: vio.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*****************************************************************************
file - vio.c
diff --git a/hdf/src/vparse.c b/hdf/src/vparse.c
index 2ba3c58..bce374e 100644
--- a/hdf/src/vparse.c
+++ b/hdf/src/vparse.c
@@ -11,11 +11,6 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6043 $";
-#endif
-/* $Id: vparse.c 6043 2014-01-21 21:09:03Z acheng $ */
-
/*****************************************************************************
file - vparse.c
diff --git a/hdf/src/vrw.c b/hdf/src/vrw.c
index e705fb6..34bdcaf 100644
--- a/hdf/src/vrw.c
+++ b/hdf/src/vrw.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6026 $";
-#endif
-
-/* $Id: vrw.c 6026 2014-01-16 15:16:16Z bmribler $ */
+/* $Id: vrw.c 6357 2016-05-13 05:00:06Z bmribler $ */
/***********************************************************************
*
diff --git a/hdf/src/vsfld.c b/hdf/src/vsfld.c
index bb30116..ef93ebc 100644
--- a/hdf/src/vsfld.c
+++ b/hdf/src/vsfld.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 5760 $";
-#endif
-
-/* $Id: vsfld.c 5760 2012-01-20 12:53:37Z bmribler $ */
+/* $Id: vsfld.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*****************************************************************************
* vsetf.c
diff --git a/hdf/test/CMakeLists.txt b/hdf/test/CMakeLists.txt
index 81d2bbf..1ea5570 100644
--- a/hdf/test/CMakeLists.txt
+++ b/hdf/test/CMakeLists.txt
@@ -1,16 +1,14 @@
-cmake_minimum_required (VERSION 2.8.10)
-PROJECT (HDF4_HDF_TEST C CXX)
+cmake_minimum_required (VERSION 3.1.0)
+project (HDF4_HDF_TEST C CXX)
#-----------------------------------------------------------------------------
# Setup include Directories
#-----------------------------------------------------------------------------
INCLUDE_DIRECTORIES (
- ${CMAKE_Fortran_MODULE_DIRECTORY}
${HDF4_HDF_BINARY_DIR}
${HDF4_HDFSOURCE_DIR}
)
LINK_DIRECTORIES (
- ${CMAKE_Fortran_MODULE_DIRECTORY}
${HDF4_HDF_BINARY_DIR}
${CMAKE_LIBRARY_OUTPUT_DIRECTORY}
)
@@ -60,17 +58,33 @@ if (WIN32)
endif (WIN32)
#-- Adding test for testhdf
-ADD_EXECUTABLE (testhdf ${testhdf_SRCS})
-TARGET_NAMING (testhdf ${LIB_TYPE})
-TARGET_C_PROPERTIES (testhdf " " " ")
+add_executable (testhdf ${testhdf_SRCS})
+TARGET_NAMING (testhdf STATIC)
+TARGET_C_PROPERTIES (testhdf STATIC " " " ")
target_link_libraries (testhdf ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
+set_target_properties (testhdf PROPERTIES FOLDER test)
+if (BUILD_SHARED_LIBS)
+ add_executable (testhdf-shared ${testhdf_SRCS})
+ TARGET_NAMING (testhdf-shared SHARED)
+ TARGET_C_PROPERTIES (testhdf-shared SHARED " " " ")
+ target_link_libraries (testhdf-shared ${HDF4_SRC_LIBSH_TARGET} ${LINK_SHARED_LIBS})
+ set_target_properties (testhdf-shared PROPERTIES FOLDER test)
+endif (BUILD_SHARED_LIBS)
#-- Adding test for buffer
if (NOT WIN32)
- ADD_EXECUTABLE (buffer ${HDF4_HDF_TEST_SOURCE_DIR}/buffer.c)
- TARGET_NAMING (buffer ${LIB_TYPE})
- TARGET_C_PROPERTIES (buffer " " " ")
+ add_executable (buffer ${HDF4_HDF_TEST_SOURCE_DIR}/buffer.c)
+ TARGET_NAMING (buffer STATIC)
+ TARGET_C_PROPERTIES (buffer STATIC " " " ")
target_link_libraries (buffer ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
+ set_target_properties (buffer PROPERTIES FOLDER test)
+ if (BUILD_SHARED_LIBS)
+ add_executable (buffer-shared ${HDF4_HDF_TEST_SOURCE_DIR}/buffer.c)
+ TARGET_NAMING (buffer-shared SHARED)
+ TARGET_C_PROPERTIES (buffer-shared SHARED " " " ")
+ target_link_libraries (buffer-shared ${HDF4_SRC_LIBSH_TARGET} ${LINK_SHARED_LIBS})
+ set_target_properties (buffer-shared PROPERTIES FOLDER test)
+ endif (BUILD_SHARED_LIBS)
endif (NOT WIN32)
##############################################################################
diff --git a/hdf/test/CMakeTests.cmake b/hdf/test/CMakeTests.cmake
index aae0932..e6c4110 100644
--- a/hdf/test/CMakeTests.cmake
+++ b/hdf/test/CMakeTests.cmake
@@ -5,7 +5,14 @@
##############################################################################
##############################################################################
-FILE (MAKE_DIRECTORY ${PROJECT_BINARY_DIR}/testdir)
+file (MAKE_DIRECTORY "${PROJECT_BINARY_DIR}/TEST")
+file (MAKE_DIRECTORY "${PROJECT_BINARY_DIR}/TEST/testdir")
+file (MAKE_DIRECTORY "${PROJECT_BINARY_DIR}/TEST/testfiles")
+if (BUILD_SHARED_LIBS)
+ file (MAKE_DIRECTORY "${PROJECT_BINARY_DIR}/TEST-shared")
+ file (MAKE_DIRECTORY "${PROJECT_BINARY_DIR}/TEST-shared/testdir")
+ file (MAKE_DIRECTORY "${PROJECT_BINARY_DIR}/TEST-shared/testfiles")
+endif (BUILD_SHARED_LIBS)
#-- Copy all the dat files from the test directory into the source directory
set (HDF4_REFERENCE_TEST_FILES
@@ -22,120 +29,136 @@ set (HDF4_REFERENCE_TEST_FILES
)
foreach (h4_file ${HDF4_REFERENCE_TEST_FILES})
- set (dest "${PROJECT_BINARY_DIR}/test_files/${h4_file}")
- #MESSAGE(STATUS " Copying ${h4_file}")
- ADD_CUSTOM_COMMAND (
- TARGET testhdf
- POST_BUILD
- COMMAND ${CMAKE_COMMAND}
- ARGS -E copy_if_different ${HDF4_HDF_TEST_SOURCE_DIR}/test_files/${h4_file} ${dest}
- )
+ set (dest "${PROJECT_BINARY_DIR}/TEST/test_files/${h4_file}")
+ add_custom_command (
+ TARGET testhdf
+ POST_BUILD
+ COMMAND ${CMAKE_COMMAND}
+ ARGS -E copy_if_different ${HDF4_HDF_TEST_SOURCE_DIR}/test_files/${h4_file} ${dest}
+ )
+ if (BUILD_SHARED_LIBS)
+ set (dest "${PROJECT_BINARY_DIR}/TEST-shared/test_files/${h4_file}")
+ add_custom_command (
+ TARGET testhdf-shared
+ POST_BUILD
+ COMMAND ${CMAKE_COMMAND}
+ ARGS -E copy_if_different ${HDF4_HDF_TEST_SOURCE_DIR}/test_files/${h4_file} ${dest}
+ )
+ endif (BUILD_SHARED_LIBS)
endforeach (h4_file ${HDF4_REFERENCE_TEST_FILES})
# Remove any output file left over from previous test run
+set (HDF4_TESTHDF_FILES
+ Block_info.hdf
+ ChunkedGR.hdf
+ ChunkedGR_DF.hdf
+ ChunkedGR_NO.hdf
+ ChunkedGR_RL.hdf
+ ChunkedGR_SK.hdf
+ datainfo_annot.hdf
+ datainfo_dfri.hdf
+ datainfo_images.hdf
+ datainfo_linkblock.hdf
+ datainfo_simple.hdf
+ gr2.hdf
+ gr_chunkcomp.hdf
+ gr_comp.hdf
+ gr_double_test.hdf
+ gr_gzip.hdf
+ gr_jpeg.hdf
+ gr_r8.hdf
+ nntcheck.hdf
+ ntcheck.hdf
+ RI_8.hdf
+ RI_16.hdf
+ RI_32.hdf
+ RI_fl32.hdf
+ RI_fl64.hdf
+ RIchunkedziped.hdf
+ s1w.hdf
+ s3w.hdf
+ s4w.hdf
+ sdstrings.hdf
+ swf32.hdf
+ swf64.hdf
+ swi8.hdf
+ swi16.hdf
+ swi32.hdf
+ swin.hdf
+ swui8.hdf
+ swui16.hdf
+ swui32.hdf
+ swuin.hdf
+ t.hdf
+ t1.hdf
+ t2.hdf
+ t3.hdf
+ t4.hdf
+ tbitio.hdf
+ tblocks.hdf
+ tchunks.hdf
+ tcomp.hdf
+ tdf24.hdf
+ tdfan.hdf
+ temp.hdf
+ thf.hdf
+ tjpeg.hdf
+ tlongnames.hdf
+ tman.hdf
+ tmgr.hdf
+ tmgratt.hdf
+ tmgrchk.hdf
+ tnbit.hdf
+ tref.hdf
+ tuservds.hdf
+ tuservgs.hdf
+ tvattr.hdf
+ tvpack.hdf
+ tvsempty.hdf
+ tvset.hdf
+ tvsetext.hdf
+ tx.hdf
+ Tables_External_File
+)
add_test (
- NAME testhdf-clearall-objects
+ NAME HDF_TEST-testhdf-clearall-objects
COMMAND ${CMAKE_COMMAND}
-E remove
- Block_info.hdf
- ChunkedGR.hdf
- ChunkedGR_DF.hdf
- ChunkedGR_NO.hdf
- ChunkedGR_RL.hdf
- ChunkedGR_SK.hdf
- datainfo_annot.hdf
- datainfo_dfri.hdf
- datainfo_images.hdf
- datainfo_linkblock.hdf
- datainfo_simple.hdf
- gr2.hdf
- gr_chunkcomp.hdf
- gr_comp.hdf
- gr_double_test.hdf
- gr_gzip.hdf
- gr_jpeg.hdf
- gr_r8.hdf
- nntcheck.hdf
- ntcheck.hdf
- RI_8.hdf
- RI_16.hdf
- RI_32.hdf
- RI_fl32.hdf
- RI_fl64.hdf
- RIchunkedziped.hdf
- s1w.hdf
- s3w.hdf
- s4w.hdf
- sdstrings.hdf
- swf32.hdf
- swf64.hdf
- swi8.hdf
- swi16.hdf
- swi32.hdf
- swin.hdf
- swui8.hdf
- swui16.hdf
- swui32.hdf
- swuin.hdf
- t.hdf
- t1.hdf
- t2.hdf
- t3.hdf
- t4.hdf
- tbitio.hdf
- tblocks.hdf
- tchunks.hdf
- tcomp.hdf
- tdf24.hdf
- tdfan.hdf
- temp.hdf
- thf.hdf
- tjpeg.hdf
- tlongnames.hdf
- tman.hdf
- tmgr.hdf
- tmgratt.hdf
- tmgrchk.hdf
- tnbit.hdf
- tref.hdf
- tuservds.hdf
- tuservgs.hdf
- tvattr.hdf
- tvpack.hdf
- tvsempty.hdf
- tvset.hdf
- tvsetext.hdf
- tx.hdf
- Tables_External_File
+ ${HDF4_TESTHDF_FILES}
+ WORKING_DIRECTORY
+ ${PROJECT_BINARY_DIR}/TEST
)
+set_tests_properties (HDF_TEST-testhdf-clearall-objects PROPERTIES LABELS ${PROJECT_NAME})
if (NOT "${last_test}" STREQUAL "")
- set_tests_properties (testhdf-clearall-objects PROPERTIES DEPENDS ${last_test} LABELS ${PROJECT_NAME})
-else (NOT "${last_test}" STREQUAL "")
- set_tests_properties (testhdf-clearall-objects PROPERTIES LABELS ${PROJECT_NAME})
+ set_tests_properties (HDF_TEST-testhdf-clearall-objects PROPERTIES DEPENDS ${last_test})
endif (NOT "${last_test}" STREQUAL "")
-set (last_test "testhdf-clearall-objects")
+set (last_test "HDF_TEST-testhdf-clearall-objects")
+set (HDF4_TESTHDF_THF0_FILES
+ thf0.hdf
+ thf1.hdf
+ thf2.hdf
+ thf3.hdf
+ thf4.hdf
+ thf5.hdf
+ thf6.hdf
+ thf7.hdf
+ thf8.hdf
+ thf9.hdf
+)
add_test (
- NAME testhdf_thf0-clearall-objects
+ NAME HDF_TEST-testhdf_thf0-clearall-objects
COMMAND ${CMAKE_COMMAND}
-E remove
- thf0.hdf
- thf1.hdf
- thf2.hdf
- thf3.hdf
- thf4.hdf
- thf5.hdf
- thf6.hdf
- thf7.hdf
- thf8.hdf
- thf9.hdf
+ ${HDF4_TESTHDF_THF0_FILES}
+ WORKING_DIRECTORY
+ ${PROJECT_BINARY_DIR}/TEST
)
+set_tests_properties (HDF_TEST-testhdf_thf0-clearall-objects PROPERTIES LABELS ${PROJECT_NAME})
if (NOT "${last_test}" STREQUAL "")
- set_tests_properties (testhdf_thf0-clearall-objects PROPERTIES DEPENDS ${last_test} LABELS ${PROJECT_NAME})
-else (NOT "${last_test}" STREQUAL "")
- set_tests_properties (testhdf_thf0-clearall-objects PROPERTIES LABELS ${PROJECT_NAME})
+ set_tests_properties (HDF_TEST-testhdf_thf0-clearall-objects PROPERTIES DEPENDS ${last_test})
endif (NOT "${last_test}" STREQUAL "")
-set (last_test "testhdf_thf0-clearall-objects")
+set (last_test "HDF_TEST-testhdf_thf0-clearall-objects")
set (thf_decade
1 2 3 4 5 6 7 8 9 10
@@ -147,7 +170,7 @@ set (thf_decade
)
foreach (decade ${thf_decade})
add_test (
- NAME testhdf_thf${decade}-clearall-objects
+ NAME HDF_TEST-testhdf_thf${decade}-clearall-objects
COMMAND ${CMAKE_COMMAND}
-E remove
thf${decade}0.hdf
@@ -160,34 +183,118 @@ foreach (decade ${thf_decade})
thf${decade}7.hdf
thf${decade}8.hdf
thf${decade}9.hdf
+ WORKING_DIRECTORY
+ ${PROJECT_BINARY_DIR}/TEST
)
+ set_tests_properties (HDF_TEST-testhdf_thf${decade}-clearall-objects PROPERTIES LABELS ${PROJECT_NAME})
if (NOT "${last_test}" STREQUAL "")
- set_tests_properties (testhdf_thf${decade}-clearall-objects PROPERTIES DEPENDS ${last_test} LABELS ${PROJECT_NAME})
- else (NOT "${last_test}" STREQUAL "")
- set_tests_properties (testhdf_thf${decade}-clearall-objects PROPERTIES LABELS ${PROJECT_NAME})
+ set_tests_properties (HDF_TEST-testhdf_thf${decade}-clearall-objects PROPERTIES DEPENDS ${last_test})
endif (NOT "${last_test}" STREQUAL "")
- set (last_test "testhdf_thf${decade}-clearall-objects")
+ set (last_test "HDF_TEST-testhdf_thf${decade}-clearall-objects")
endforeach (decade ${thf_decade})
-add_test (NAME testhdf COMMAND $<TARGET_FILE:testhdf>)
+add_test (NAME HDF_TEST-testhdf COMMAND $<TARGET_FILE:testhdf>)
set (passRegex "All tests were successful")
-SET_PROPERTY (TEST testhdf PROPERTY PASS_REGULAR_EXPRESSION "${passRegex}")
+set_tests_properties (HDF_TEST-testhdf PROPERTIES
+ PASS_REGULAR_EXPRESSION "${passRegex}"
+ WORKING_DIRECTORY ${PROJECT_BINARY_DIR}/TEST
+ LABELS ${PROJECT_NAME}
+)
if (NOT "${last_test}" STREQUAL "")
- set_tests_properties (testhdf PROPERTIES DEPENDS ${last_test} LABELS ${PROJECT_NAME})
-else (NOT "${last_test}" STREQUAL "")
- set_tests_properties (testhdf PROPERTIES LABELS ${PROJECT_NAME})
+ set_tests_properties (HDF_TEST-testhdf PROPERTIES DEPENDS ${last_test})
endif (NOT "${last_test}" STREQUAL "")
-set (last_test "testhdf")
+set (last_test "HDF_TEST-testhdf")
#-- Adding test for buffer
if (NOT WIN32)
- add_test (NAME buffer COMMAND $<TARGET_FILE:buffer>)
+ add_test (NAME HDF_TEST-buffer COMMAND $<TARGET_FILE:buffer>)
+ set_tests_properties (HDF_TEST-buffer PROPERTIES
+ WORKING_DIRECTORY ${PROJECT_BINARY_DIR}/TEST
+ LABELS ${PROJECT_NAME}
+ )
if (NOT "${last_test}" STREQUAL "")
- set_tests_properties (buffer PROPERTIES DEPENDS ${last_test} LABELS ${PROJECT_NAME})
- else (NOT "${last_test}" STREQUAL "")
- set_tests_properties (buffer PROPERTIES LABELS ${PROJECT_NAME})
+ set_tests_properties (HDF_TEST-buffer PROPERTIES DEPENDS ${last_test})
endif (NOT "${last_test}" STREQUAL "")
- set (last_test "buffer")
+ set (last_test "HDF_TEST-buffer")
endif (NOT WIN32)
+if (BUILD_SHARED_LIBS)
+ # Remove any output file left over from previous test run
+ add_test (
+ NAME HDF_TEST-testhdf-shared-clearall-objects
+ COMMAND ${CMAKE_COMMAND}
+ -E remove
+ ${HDF4_TESTHDF_FILES}
+ WORKING_DIRECTORY
+ ${PROJECT_BINARY_DIR}/TEST-shared
+ )
+ set_tests_properties (HDF_TEST-testhdf-shared-clearall-objects PROPERTIES LABELS ${PROJECT_NAME})
+ if (NOT "${last_test}" STREQUAL "")
+ set_tests_properties (HDF_TEST-testhdf-shared-clearall-objects PROPERTIES DEPENDS ${last_test})
+ endif (NOT "${last_test}" STREQUAL "")
+ set (last_test "HDF_TEST-testhdf-shared-clearall-objects")
+
+ add_test (
+ NAME HDF_TEST-testhdf_thf0-shared-clearall-objects
+ COMMAND ${CMAKE_COMMAND}
+ -E remove
+ ${HDF4_TESTHDF_THF0_FILES}
+ WORKING_DIRECTORY
+ ${PROJECT_BINARY_DIR}/TEST-shared
+ )
+ set_tests_properties (HDF_TEST-testhdf_thf0-shared-clearall-objects PROPERTIES LABELS ${PROJECT_NAME})
+ if (NOT "${last_test}" STREQUAL "")
+ set_tests_properties (HDF_TEST-testhdf_thf0-shared-clearall-objects PROPERTIES DEPENDS ${last_test})
+ endif (NOT "${last_test}" STREQUAL "")
+ set (last_test "HDF_TEST-testhdf_thf0-shared-clearall-objects")
+
+ foreach (decade ${thf_decade})
+ add_test (
+ NAME HDF_TEST-testhdf_thf${decade}-shared-clearall-objects
+ COMMAND ${CMAKE_COMMAND}
+ -E remove
+ thf${decade}0.hdf
+ thf${decade}1.hdf
+ thf${decade}2.hdf
+ thf${decade}3.hdf
+ thf${decade}4.hdf
+ thf${decade}5.hdf
+ thf${decade}6.hdf
+ thf${decade}7.hdf
+ thf${decade}8.hdf
+ thf${decade}9.hdf
+ WORKING_DIRECTORY
+ ${PROJECT_BINARY_DIR}/TEST-shared
+ )
+ set_tests_properties (HDF_TEST-testhdf_thf${decade}-shared-clearall-objects PROPERTIES LABELS ${PROJECT_NAME})
+ if (NOT "${last_test}" STREQUAL "")
+ set_tests_properties (HDF_TEST-testhdf_thf${decade}-shared-clearall-objects PROPERTIES DEPENDS ${last_test})
+ endif (NOT "${last_test}" STREQUAL "")
+ set (last_test "HDF_TEST-testhdf_thf${decade}-shared-clearall-objects")
+ endforeach (decade ${thf_decade})
+
+ add_test (NAME HDF_TEST-testhdf-shared COMMAND $<TARGET_FILE:testhdf-shared>)
+ set (passRegex "All tests were successful")
+ set_tests_properties (HDF_TEST-testhdf-shared PROPERTIES
+ PASS_REGULAR_EXPRESSION "${passRegex}"
+ WORKING_DIRECTORY ${PROJECT_BINARY_DIR}/TEST-shared
+ LABELS ${PROJECT_NAME}
+ )
+ if (NOT "${last_test}" STREQUAL "")
+ set_tests_properties (HDF_TEST-testhdf-shared PROPERTIES DEPENDS ${last_test})
+ endif (NOT "${last_test}" STREQUAL "")
+ set (last_test "HDF_TEST-testhdf-shared")
+ #-- Adding test for buffer
+ if (NOT WIN32)
+ add_test (NAME HDF_TEST-buffer-shared COMMAND $<TARGET_FILE:buffer-shared>)
+ set_tests_properties (HDF_TEST-buffer-shared PROPERTIES
+ WORKING_DIRECTORY ${PROJECT_BINARY_DIR}/TEST-shared
+ LABELS ${PROJECT_NAME}
+ )
+ if (NOT "${last_test}" STREQUAL "")
+ set_tests_properties (HDF_TEST-buffer-shared PROPERTIES DEPENDS ${last_test})
+ endif (NOT "${last_test}" STREQUAL "")
+ set (last_test "HDF_TEST-buffer-shared")
+ endif (NOT WIN32)
+endif (BUILD_SHARED_LIBS)
diff --git a/hdf/test/Makefile.in b/hdf/test/Makefile.in
index addade0..897c9ce 100644
--- a/hdf/test/Makefile.in
+++ b/hdf/test/Makefile.in
@@ -92,7 +92,20 @@ DIST_COMMON = $(top_srcdir)/config/commence.am \
TESTS = $(am__EXEEXT_1)
subdir = hdf/test
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
-am__aclocal_m4_deps = $(top_srcdir)/configure.ac
+am__aclocal_m4_deps = $(top_srcdir)/m4/ax_check_class.m4 \
+ $(top_srcdir)/m4/ax_check_classpath.m4 \
+ $(top_srcdir)/m4/ax_check_java_home.m4 \
+ $(top_srcdir)/m4/ax_check_junit.m4 \
+ $(top_srcdir)/m4/ax_java_options.m4 \
+ $(top_srcdir)/m4/ax_jni_include_dir.m4 \
+ $(top_srcdir)/m4/ax_prog_jar.m4 \
+ $(top_srcdir)/m4/ax_prog_java.m4 \
+ $(top_srcdir)/m4/ax_prog_java_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javac.m4 \
+ $(top_srcdir)/m4/ax_prog_javac_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javadoc.m4 \
+ $(top_srcdir)/m4/ax_try_compile_java.m4 \
+ $(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
mkinstalldirs = $(SHELL) $(top_srcdir)/bin/mkinstalldirs
@@ -462,12 +475,27 @@ FFLAGS = @FFLAGS@
FGREP = @FGREP@
FLIBS = @FLIBS@
GREP = @GREP@
+H4_CLASSPATH = @H4_CLASSPATH@
+H4_JAVACFLAGS = @H4_JAVACFLAGS@
+H4_JAVAFLAGS = @H4_JAVAFLAGS@
H4_VERSION = @H4_VERSION@
+HDF_JAVA = @HDF_JAVA@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
INSTALL_PROGRAM = @INSTALL_PROGRAM@
INSTALL_SCRIPT = @INSTALL_SCRIPT@
INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+JAR = @JAR@
+JAVA = @JAVA@
+JAVAC = @JAVAC@
+JAVACFLAGS = @JAVACFLAGS@
+JAVADOC = @JAVADOC@
+JAVAFLAGS = @JAVAFLAGS@
+JAVAPREFIX = @JAVAPREFIX@
+JAVA_JUNIT = @JAVA_JUNIT@
+JAVA_PATH_NAME = @JAVA_PATH_NAME@
+JNIFLAGS = @JNIFLAGS@
+JUNIT = @JUNIT@
LD = @LD@
LDFLAGS = @LDFLAGS@
LEX = @LEX@
@@ -510,11 +538,14 @@ STRIP = @STRIP@
SZIP_HAS_ENCODER = @SZIP_HAS_ENCODER@
SZIP_INFO = @SZIP_INFO@
TBL = @TBL@
+TESTS_JUNIT = @TESTS_JUNIT@
TEST_FORTRAN_NETCDF = @TEST_FORTRAN_NETCDF@
UNAME_INFO = @UNAME_INFO@
USE_COMP_SZIP = @USE_COMP_SZIP@
+UUDECODE = @UUDECODE@
VERSION = @VERSION@
YACC = @YACC@
+_ACJNI_JAVAC = @_ACJNI_JAVAC@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/hdf/test/an.c b/hdf/test/an.c
index 69f153a..8c95afc 100644
--- a/hdf/test/an.c
+++ b/hdf/test/an.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: an.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: an.c 6357 2016-05-13 05:00:06Z bmribler $ */
/***********************************************************
*
diff --git a/hdf/test/anfile.c b/hdf/test/anfile.c
index 4c60425..bea4b08 100644
--- a/hdf/test/anfile.c
+++ b/hdf/test/anfile.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: anfile.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: anfile.c 6357 2016-05-13 05:00:06Z bmribler $ */
#include "tproto.h"
#define TESTFILE "tdfan.hdf"
diff --git a/hdf/test/bitio.c b/hdf/test/bitio.c
index b601ac2..48fa26d 100644
--- a/hdf/test/bitio.c
+++ b/hdf/test/bitio.c
@@ -31,11 +31,7 @@
10/19/93 - Started coding.
*/
-/* $Id: bitio.c 5210 2009-08-21 20:27:12Z brtnfld $ */
-
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 5210 $";
-#endif
+/* $Id: bitio.c 6357 2016-05-13 05:00:06Z bmribler $ */
#include "tproto.h"
#include <time.h>
diff --git a/hdf/test/blocks.c b/hdf/test/blocks.c
index 9f60f5e..8902b51 100644
--- a/hdf/test/blocks.c
+++ b/hdf/test/blocks.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: blocks.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: blocks.c 6357 2016-05-13 05:00:06Z bmribler $ */
#include "tproto.h"
#define TESTFILE_NAME "tblocks.hdf"
diff --git a/hdf/test/buffer.c b/hdf/test/buffer.c
index a6908d6..c2d95ab 100644
--- a/hdf/test/buffer.c
+++ b/hdf/test/buffer.c
@@ -42,11 +42,7 @@
*/
-/* $Id: buffer.c 6013 2014-01-10 21:19:02Z acheng $ */
-
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6013 $";
-#endif
+/* $Id: buffer.c 6427 2016-06-13 21:56:09Z byrn $ */
#define TESTMASTER
@@ -452,9 +448,9 @@ main(int argc, char *argv[])
exit(1);
}
- out_buf = malloc(elemsize * sizeof(uint8));
- in_buf = malloc(elemsize * sizeof(uint8));
-
+ out_buf = HDmalloc(elemsize * sizeof(uint8));
+ in_buf = HDmalloc(elemsize * sizeof(uint8));
+
Verbosity = 4; /* Default Verbosity is Low */
Hgetlibversion(&lmajor, &lminor, &lrelease, lstring);
@@ -557,8 +553,8 @@ main(int argc, char *argv[])
remove(hfilename);
}
- free(out_buf);
- free(in_buf);
+ HDfree(out_buf);
+ HDfree(in_buf);
MESSAGE(6, printf("Finished buffered element test\n");
)
diff --git a/hdf/test/chunks.c b/hdf/test/chunks.c
index 22763c6..5ded443 100644
--- a/hdf/test/chunks.c
+++ b/hdf/test/chunks.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6032 $";
-#endif
-
-/* $Id: chunks.c 6032 2014-01-17 18:13:52Z acheng $ */
+/* $Id: chunks.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*
* This file tests the Special Chunking Element(HMCxxx) layer of the HDF library.
diff --git a/hdf/test/comp.c b/hdf/test/comp.c
index 6bb6a6a..a75b684 100644
--- a/hdf/test/comp.c
+++ b/hdf/test/comp.c
@@ -30,11 +30,7 @@
10/19/93 - Through this header in.
*/
-/* $Id: comp.c 4932 2007-09-07 17:17:23Z bmribler $ */
-
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
+/* $Id: comp.c 6357 2016-05-13 05:00:06Z bmribler $ */
#include <time.h>
#include "tproto.h"
diff --git a/hdf/test/conv.c b/hdf/test/conv.c
index bda4c39..8aa07dc 100644
--- a/hdf/test/conv.c
+++ b/hdf/test/conv.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-/* $Id: conv.c 6036 2014-01-20 17:28:01Z acheng $ */
-
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6036 $";
-#endif
+/* $Id: conv.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*
FILE
diff --git a/hdf/test/extelt.c b/hdf/test/extelt.c
index e43562d..8007d74 100644
--- a/hdf/test/extelt.c
+++ b/hdf/test/extelt.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 5201 $";
-#endif
-
-/* $Id: extelt.c 5201 2009-06-18 14:11:06Z bmribler $ */
+/* $Id: extelt.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*
* These is a first pass at rewriting how these tests for exteranl
diff --git a/hdf/test/file.c b/hdf/test/file.c
index e645bd3..d0896ce 100644
--- a/hdf/test/file.c
+++ b/hdf/test/file.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: file.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: file.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*
* Hopen
diff --git a/hdf/test/file1.c b/hdf/test/file1.c
index e5bbefa..24f1b8c 100644
--- a/hdf/test/file1.c
+++ b/hdf/test/file1.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6043 $";
-#endif
-
-/* $Id: file1.c 6043 2014-01-21 21:09:03Z acheng $ */
+/* $Id: file1.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*
test opening files and access elements until limits are reached
diff --git a/hdf/test/forsupf.c b/hdf/test/forsupf.c
index 5cbe6b5..f17d41e 100644
--- a/hdf/test/forsupf.c
+++ b/hdf/test/forsupf.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "$Revision: 6036 $";
-#endif
-
-/* $Id: forsupf.c 6036 2014-01-20 17:28:01Z acheng $ */
+/* $Id: forsupf.c 6357 2016-05-13 05:00:06Z bmribler $ */
#include "hdf.h"
#include "fortest.h"
diff --git a/hdf/test/fortest.c b/hdf/test/fortest.c
index 35133ab..bf2f05c 100644
--- a/hdf/test/fortest.c
+++ b/hdf/test/fortest.c
@@ -9,12 +9,9 @@
* of the source code distribution tree; Copyright.html can be found at *
* http://hdfgroup.org/products/hdf4/doc/Copyright.html. If you do not have *
* access to either file, you may request a copy from help at hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6036 $";
-#endif
-
-/* $Id: fortest.c 6036 2014-01-20 17:28:01Z acheng $ */
+/* $Id: fortest.c 6357 2016-05-13 05:00:06Z bmribler $ */
#define TESTMASTER
diff --git a/hdf/test/gentest.c b/hdf/test/gentest.c
index e164add..1e5fb4c 100644
--- a/hdf/test/gentest.c
+++ b/hdf/test/gentest.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-/* $Id: gentest.c 5210 2009-08-21 20:27:12Z brtnfld $ */
-
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 5210 $";
-#endif
+/* $Id: gentest.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*
FILE
diff --git a/hdf/test/litend.c b/hdf/test/litend.c
index b094e8f..ee0780d 100644
--- a/hdf/test/litend.c
+++ b/hdf/test/litend.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 5210 $";
-#endif
-
-/* $Id: litend.c 5210 2009-08-21 20:27:12Z brtnfld $ */
+/* $Id: litend.c 6357 2016-05-13 05:00:06Z bmribler $ */
#include "tproto.h"
diff --git a/hdf/test/man.c b/hdf/test/man.c
index 54a8074..52bf38b 100644
--- a/hdf/test/man.c
+++ b/hdf/test/man.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: man.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: man.c 6357 2016-05-13 05:00:06Z bmribler $ */
/***********************************************************
*
diff --git a/hdf/test/mgr.c b/hdf/test/mgr.c
index f718524..df680d9 100644
--- a/hdf/test/mgr.c
+++ b/hdf/test/mgr.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "$Revision: 5584 $";
-#endif
-
-/* $Id: mgr.c 5584 2011-04-13 18:25:06Z bmribler $ */
+/* $Id: mgr.c 6357 2016-05-13 05:00:06Z bmribler $ */
/***********************************************************
*
diff --git a/hdf/test/nbit.c b/hdf/test/nbit.c
index b155ae8..b05bdff 100644
--- a/hdf/test/nbit.c
+++ b/hdf/test/nbit.c
@@ -33,11 +33,7 @@
1/19/94 - Started coding
*/
-/* $Id: nbit.c 5210 2009-08-21 20:27:12Z brtnfld $ */
-
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 5210 $";
-#endif
+/* $Id: nbit.c 6357 2016-05-13 05:00:06Z bmribler $ */
#include "tproto.h"
#define TESTFILE_NAME "tnbit.hdf"
diff --git a/hdf/test/rig.c b/hdf/test/rig.c
index aa16f5e..2340414 100644
--- a/hdf/test/rig.c
+++ b/hdf/test/rig.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 5830 $";
-#endif
-
-/* $Id: rig.c 5830 2012-07-19 09:08:48Z bmribler $ */
+/* $Id: rig.c 6357 2016-05-13 05:00:06Z bmribler $ */
#include "tproto.h"
diff --git a/hdf/test/sdmms.c b/hdf/test/sdmms.c
index b902dde..de74725 100644
--- a/hdf/test/sdmms.c
+++ b/hdf/test/sdmms.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6035 $";
-#endif
-
-/* $Id: sdmms.c 6035 2014-01-19 07:19:17Z bmribler $ */
+/* $Id: sdmms.c 6357 2016-05-13 05:00:06Z bmribler $ */
#include "tproto.h"
diff --git a/hdf/test/sdnmms.c b/hdf/test/sdnmms.c
index a88e472..2394333 100644
--- a/hdf/test/sdnmms.c
+++ b/hdf/test/sdnmms.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: sdnmms.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: sdnmms.c 6357 2016-05-13 05:00:06Z bmribler $ */
#include "tproto.h"
diff --git a/hdf/test/sdstr.c b/hdf/test/sdstr.c
index 8715c54..a2a09c2 100644
--- a/hdf/test/sdstr.c
+++ b/hdf/test/sdstr.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: sdstr.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: sdstr.c 6357 2016-05-13 05:00:06Z bmribler $ */
/***************************************************************
**
diff --git a/hdf/test/slab.c b/hdf/test/slab.c
index 0ff1321..4b88d98 100644
--- a/hdf/test/slab.c
+++ b/hdf/test/slab.c
@@ -11,10 +11,6 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "$Id: slab.c 4932 2007-09-07 17:17:23Z bmribler $";
-#endif
-
/* $Id */
/***************************************************************************
diff --git a/hdf/test/tbv.c b/hdf/test/tbv.c
index 3aa5db5..ebb52e9 100644
--- a/hdf/test/tbv.c
+++ b/hdf/test/tbv.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 5201 $";
-#endif
-
-/* $Id: tbv.c 5201 2009-06-18 14:11:06Z bmribler $ */
+/* $Id: tbv.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*
FILE
diff --git a/hdf/test/testhdf.c b/hdf/test/testhdf.c
index 2ad115a..95dafbc 100644
--- a/hdf/test/testhdf.c
+++ b/hdf/test/testhdf.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6036 $";
-#endif
-
-/* $Id: testhdf.c 6036 2014-01-20 17:28:01Z acheng $ */
+/* $Id: testhdf.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*
FILE
diff --git a/hdf/test/tree.c b/hdf/test/tree.c
index 8424879..03d707f 100644
--- a/hdf/test/tree.c
+++ b/hdf/test/tree.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: tree.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: tree.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*
FILE
diff --git a/hdf/test/tvattr.c b/hdf/test/tvattr.c
index b6b6e5f..657c158 100644
--- a/hdf/test/tvattr.c
+++ b/hdf/test/tvattr.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 5912 $";
-#endif
-
-/* $Id: tvattr.c 5912 2013-01-24 23:30:00Z bmribler $ */
+/* $Id: tvattr.c 6357 2016-05-13 05:00:06Z bmribler $ */
/**************************************************************
*
diff --git a/hdf/test/tvset.c b/hdf/test/tvset.c
index ae07d26..642a028 100644
--- a/hdf/test/tvset.c
+++ b/hdf/test/tvset.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6043 $";
-#endif
-
-/* $Id: tvset.c 6043 2014-01-21 21:09:03Z acheng $ */
+/* $Id: tvset.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*
*
diff --git a/hdf/test/vers.c b/hdf/test/vers.c
index f8ee58e..c036178 100644
--- a/hdf/test/vers.c
+++ b/hdf/test/vers.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4932 $";
-#endif
-
-/* $Id: vers.c 4932 2007-09-07 17:17:23Z bmribler $ */
+/* $Id: vers.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*
***********************************************************************
diff --git a/hdf/util/CMakeLists.txt b/hdf/util/CMakeLists.txt
index fa1e5fc..20578d0 100644
--- a/hdf/util/CMakeLists.txt
+++ b/hdf/util/CMakeLists.txt
@@ -1,5 +1,5 @@
-cmake_minimum_required (VERSION 2.8.10)
-PROJECT (HDF4_HDF_UTIL C CXX)
+cmake_minimum_required (VERSION 3.1.0)
+project (HDF4_HDF_UTIL C CXX)
INCLUDE_DIRECTORIES (${HDF4_HDFSOURCE_DIR})
@@ -9,9 +9,9 @@ endif (WIN32)
if (HDF4_BUILD_TOOLS)
#-- Adding tool hdfls
- ADD_EXECUTABLE (hdfls ${HDF4_HDF_UTIL_SOURCE_DIR}/hdfls.c)
- TARGET_NAMING (hdfls ${LIB_TYPE})
- TARGET_C_PROPERTIES (hdfls " " " ")
+ add_executable (hdfls ${HDF4_HDF_UTIL_SOURCE_DIR}/hdfls.c)
+ TARGET_NAMING (hdfls STATIC)
+ TARGET_C_PROPERTIES (hdfls STATIC " " " ")
target_link_libraries (hdfls ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
#-- Adding utility hdfed
@@ -21,10 +21,10 @@ if (HDF4_BUILD_TOOLS)
${HDF4_HDF_UTIL_SOURCE_DIR}/he_file.c
${HDF4_HDF_UTIL_SOURCE_DIR}/he_main.c
)
-
- ADD_EXECUTABLE (hdfed ${hdfed_SRCS})
- TARGET_NAMING (hdfed ${LIB_TYPE})
- TARGET_C_PROPERTIES (hdfed " " " ")
+
+ add_executable (hdfed ${hdfed_SRCS})
+ TARGET_NAMING (hdfed STATIC)
+ TARGET_C_PROPERTIES (hdfed STATIC " " " ")
target_link_libraries (hdfed ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
endif (HDF4_BUILD_TOOLS)
@@ -34,10 +34,10 @@ if (HDF4_BUILD_UTILS)
${HDF4_HDF_UTIL_SOURCE_DIR}/hdf2gif.c
${HDF4_HDF_UTIL_SOURCE_DIR}/hdfgifwr.c
)
-
- ADD_EXECUTABLE (hdf2gif ${hdf2gif_SRCS})
- TARGET_NAMING (hdf2gif ${LIB_TYPE})
- TARGET_C_PROPERTIES (hdf2gif " " " ")
+
+ add_executable (hdf2gif ${hdf2gif_SRCS})
+ TARGET_NAMING (hdf2gif STATIC)
+ TARGET_C_PROPERTIES (hdf2gif STATIC " " " ")
target_link_libraries (hdf2gif ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
#-- Adding utility gif2hdf
@@ -48,94 +48,94 @@ if (HDF4_BUILD_UTILS)
${HDF4_HDF_UTIL_SOURCE_DIR}/decompress.c
${HDF4_HDF_UTIL_SOURCE_DIR}/writehdf.c
)
-
- ADD_EXECUTABLE (gif2hdf ${gif2hdf_SRCS})
- TARGET_NAMING (gif2hdf ${LIB_TYPE})
- TARGET_C_PROPERTIES (gif2hdf " " " ")
+
+ add_executable (gif2hdf ${gif2hdf_SRCS})
+ TARGET_NAMING (gif2hdf STATIC)
+ TARGET_C_PROPERTIES (gif2hdf STATIC " " " ")
target_link_libraries (gif2hdf ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
#-- Adding utility hdf24to8
- ADD_EXECUTABLE (hdf24to8 ${HDF4_HDF_UTIL_SOURCE_DIR}/hdf24to8.c)
- TARGET_NAMING (hdf24to8 ${LIB_TYPE})
- TARGET_C_PROPERTIES (hdf24to8 " " " ")
+ add_executable (hdf24to8 ${HDF4_HDF_UTIL_SOURCE_DIR}/hdf24to8.c)
+ TARGET_NAMING (hdf24to8 STATIC)
+ TARGET_C_PROPERTIES (hdf24to8 STATIC " " " ")
target_link_libraries (hdf24to8 ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
#-- Adding utility hdftor8
- ADD_EXECUTABLE (hdftor8 ${HDF4_HDF_UTIL_SOURCE_DIR}/hdftor8.c)
- TARGET_NAMING (hdftor8 ${LIB_TYPE})
- TARGET_C_PROPERTIES (hdftor8 " " " ")
+ add_executable (hdftor8 ${HDF4_HDF_UTIL_SOURCE_DIR}/hdftor8.c)
+ TARGET_NAMING (hdftor8 STATIC)
+ TARGET_C_PROPERTIES (hdftor8 STATIC " " " ")
target_link_libraries (hdftor8 ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
#-- Adding utility ristosds
- ADD_EXECUTABLE (ristosds ${HDF4_HDF_UTIL_SOURCE_DIR}/ristosds.c)
- TARGET_NAMING (ristosds ${LIB_TYPE})
- TARGET_C_PROPERTIES (ristosds " " " ")
+ add_executable (ristosds ${HDF4_HDF_UTIL_SOURCE_DIR}/ristosds.c)
+ TARGET_NAMING (ristosds STATIC)
+ TARGET_C_PROPERTIES (ristosds STATIC " " " ")
target_link_libraries (ristosds ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
#-- Adding utility hdfpack
- ADD_EXECUTABLE (hdfpack ${HDF4_HDF_UTIL_SOURCE_DIR}/hdfpack.c)
- TARGET_NAMING (hdfpack ${LIB_TYPE})
- TARGET_C_PROPERTIES (hdfpack " " " ")
+ add_executable (hdfpack ${HDF4_HDF_UTIL_SOURCE_DIR}/hdfpack.c)
+ TARGET_NAMING (hdfpack STATIC)
+ TARGET_C_PROPERTIES (hdfpack STATIC " " " ")
target_link_libraries (hdfpack ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
#-- Adding utility paltohdf
- ADD_EXECUTABLE (paltohdf ${HDF4_HDF_UTIL_SOURCE_DIR}/paltohdf.c)
- TARGET_NAMING (paltohdf ${LIB_TYPE})
- TARGET_C_PROPERTIES (paltohdf " " " ")
+ add_executable (paltohdf ${HDF4_HDF_UTIL_SOURCE_DIR}/paltohdf.c)
+ TARGET_NAMING (paltohdf STATIC)
+ TARGET_C_PROPERTIES (paltohdf STATIC " " " ")
target_link_libraries (paltohdf ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
#-- Adding utility hdftopal
- ADD_EXECUTABLE (hdftopal ${HDF4_HDF_UTIL_SOURCE_DIR}/hdftopal.c)
- TARGET_NAMING (hdftopal ${LIB_TYPE})
- TARGET_C_PROPERTIES (hdftopal " " " ")
+ add_executable (hdftopal ${HDF4_HDF_UTIL_SOURCE_DIR}/hdftopal.c)
+ TARGET_NAMING (hdftopal STATIC)
+ TARGET_C_PROPERTIES (hdftopal STATIC " " " ")
target_link_libraries (hdftopal ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
#-- Adding utility r8tohdf
- ADD_EXECUTABLE (r8tohdf ${HDF4_HDF_UTIL_SOURCE_DIR}/r8tohdf.c)
- TARGET_NAMING (r8tohdf ${LIB_TYPE})
- TARGET_C_PROPERTIES (r8tohdf " " " ")
+ add_executable (r8tohdf ${HDF4_HDF_UTIL_SOURCE_DIR}/r8tohdf.c)
+ TARGET_NAMING (r8tohdf STATIC)
+ TARGET_C_PROPERTIES (r8tohdf STATIC " " " ")
target_link_libraries (r8tohdf ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
#-- Adding utility hdfcomp
- ADD_EXECUTABLE (hdfcomp ${HDF4_HDF_UTIL_SOURCE_DIR}/hdfcomp.c)
- TARGET_NAMING (hdfcomp ${LIB_TYPE})
- TARGET_C_PROPERTIES (hdfcomp " " " ")
+ add_executable (hdfcomp ${HDF4_HDF_UTIL_SOURCE_DIR}/hdfcomp.c)
+ TARGET_NAMING (hdfcomp STATIC)
+ TARGET_C_PROPERTIES (hdfcomp STATIC " " " ")
target_link_libraries (hdfcomp ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
#-- Adding utility jpeg2hdf
- ADD_EXECUTABLE (jpeg2hdf ${HDF4_HDF_UTIL_SOURCE_DIR}/jpeg2hdf.c)
- TARGET_NAMING (jpeg2hdf ${LIB_TYPE})
- TARGET_C_PROPERTIES (jpeg2hdf " " " ")
+ add_executable (jpeg2hdf ${HDF4_HDF_UTIL_SOURCE_DIR}/jpeg2hdf.c)
+ TARGET_NAMING (jpeg2hdf STATIC)
+ TARGET_C_PROPERTIES (jpeg2hdf STATIC " " " ")
target_link_libraries (jpeg2hdf ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
#-- Adding utility hdf2jpeg
- ADD_EXECUTABLE (hdf2jpeg ${HDF4_HDF_UTIL_SOURCE_DIR}/hdf2jpeg.c)
- TARGET_NAMING (hdf2jpeg ${LIB_TYPE})
- TARGET_C_PROPERTIES (hdf2jpeg " " " ")
+ add_executable (hdf2jpeg ${HDF4_HDF_UTIL_SOURCE_DIR}/hdf2jpeg.c)
+ TARGET_NAMING (hdf2jpeg STATIC)
+ TARGET_C_PROPERTIES (hdf2jpeg STATIC " " " ")
target_link_libraries (hdf2jpeg ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
#-- Adding utility hdf8to24
- ADD_EXECUTABLE (hdf8to24 ${HDF4_HDF_UTIL_SOURCE_DIR}/hdf8to24.c)
- TARGET_NAMING (hdf8to24 ${LIB_TYPE})
- TARGET_C_PROPERTIES (hdf8to24 " " " ")
+ add_executable (hdf8to24 ${HDF4_HDF_UTIL_SOURCE_DIR}/hdf8to24.c)
+ TARGET_NAMING (hdf8to24 STATIC)
+ TARGET_C_PROPERTIES (hdf8to24 STATIC " " " ")
target_link_libraries (hdf8to24 ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
#-- Adding utility hdfunpac
- ADD_EXECUTABLE (hdfunpac ${HDF4_HDF_UTIL_SOURCE_DIR}/hdfunpac.c)
- TARGET_NAMING (hdfunpac ${LIB_TYPE})
- TARGET_C_PROPERTIES (hdfunpac " " " ")
+ add_executable (hdfunpac ${HDF4_HDF_UTIL_SOURCE_DIR}/hdfunpac.c)
+ TARGET_NAMING (hdfunpac STATIC)
+ TARGET_C_PROPERTIES (hdfunpac STATIC " " " ")
target_link_libraries (hdfunpac ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
#-- Adding utility vmake
- ADD_EXECUTABLE (vmake ${HDF4_HDF_UTIL_SOURCE_DIR}/vmake.c)
- TARGET_NAMING (vmake ${LIB_TYPE})
- TARGET_C_PROPERTIES (vmake " " " ")
+ add_executable (vmake ${HDF4_HDF_UTIL_SOURCE_DIR}/vmake.c)
+ TARGET_NAMING (vmake STATIC)
+ TARGET_C_PROPERTIES (vmake STATIC " " " ")
target_link_libraries (vmake ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
#-- Adding utility vshow
- ADD_EXECUTABLE (vshow ${HDF4_HDF_UTIL_SOURCE_DIR}/vshow.c)
- TARGET_NAMING (vshow ${LIB_TYPE})
- TARGET_C_PROPERTIES (vshow " " " ")
+ add_executable (vshow ${HDF4_HDF_UTIL_SOURCE_DIR}/vshow.c)
+ TARGET_NAMING (vshow STATIC)
+ TARGET_C_PROPERTIES (vshow STATIC " " " ")
target_link_libraries (vshow ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
endif (HDF4_BUILD_UTILS)
@@ -160,10 +160,7 @@ if (HDF4_BUILD_TOOLS)
TARGETS
hdfls
hdfed
- RUNTIME DESTINATION
- ${HDF4_INSTALL_TOOLS_BIN_DIR}
- COMPONENT
- toolsapplications
+ RUNTIME DESTINATION ${HDF4_INSTALL_TOOLS_BIN_DIR} COMPONENT toolsapplications
)
endif (HDF4_BUILD_TOOLS)
@@ -184,7 +181,7 @@ if (HDF4_BUILD_UTILS)
INSTALL_PROGRAM_PDB (ristosds ${HDF4_INSTALL_UTILS_BIN_DIR} utilsapplications)
INSTALL_PROGRAM_PDB (vmake ${HDF4_INSTALL_UTILS_BIN_DIR} utilsapplications)
INSTALL_PROGRAM_PDB (vshow ${HDF4_INSTALL_UTILS_BIN_DIR} utilsapplications)
-
+
INSTALL (
TARGETS
gif2hdf
@@ -203,9 +200,6 @@ if (HDF4_BUILD_UTILS)
ristosds
vmake
vshow
- RUNTIME DESTINATION
- ${HDF4_INSTALL_UTILS_BIN_DIR}
- COMPONENT
- utilsapplications
+ RUNTIME DESTINATION ${HDF4_INSTALL_UTILS_BIN_DIR} COMPONENT utilsapplications
)
endif (HDF4_BUILD_UTILS)
diff --git a/hdf/util/CMakeTests.cmake b/hdf/util/CMakeTests.cmake
index 5a4fc70..6546a68 100644
--- a/hdf/util/CMakeTests.cmake
+++ b/hdf/util/CMakeTests.cmake
@@ -4,7 +4,7 @@
### T E S T I N G ###
##############################################################################
##############################################################################
-
+
##############################################################################
# --------------------------------------------------------------------
# Copy all the files from the test directory into the source directory
@@ -25,7 +25,7 @@ if (HDF4_BUILD_TOOLS)
ristosds.input1
ristosds.out1
)
-
+
foreach (h4_file ${HDF4_LS_TEST_FILES})
set (dest "${PROJECT_BINARY_DIR}/${h4_file}")
#message (STATUS " Copying ${HDF4_HDF_UTIL_SOURCE_DIR}/${h4_file} to ${PROJECT_BINARY_DIR}/")
@@ -36,22 +36,22 @@ if (HDF4_BUILD_TOOLS)
ARGS -E copy_if_different ${HDF4_HDF_UTIL_SOURCE_DIR}/${h4_file} ${dest}
)
endforeach (h4_file ${HDF4_LS_TEST_FILES})
-
- if (WIN32)
+
+ if (WIN32 AND MSVC_VERSION LESS 1900)
ADD_CUSTOM_COMMAND (
TARGET hdfls
POST_BUILD
COMMAND ${CMAKE_COMMAND}
ARGS -E copy_if_different ${HDF4_HDF_UTIL_SOURCE_DIR}/hdfed-w.out1 ${PROJECT_BINARY_DIR}/hdfed.out1
)
- else (WIN32)
+ else (WIN32 AND MSVC_VERSION LESS 1900)
ADD_CUSTOM_COMMAND (
TARGET hdfls
POST_BUILD
COMMAND ${CMAKE_COMMAND}
ARGS -E copy_if_different ${HDF4_HDF_UTIL_SOURCE_DIR}/hdfed.out1 ${PROJECT_BINARY_DIR}/hdfed.out1
)
- endif (WIN32)
+ endif (WIN32 AND MSVC_VERSION LESS 1900)
#-- Copy all the hdfed data files from the source directory into the test directory
set (HDF4_HDFED_TEST_FILES
@@ -213,7 +213,7 @@ endif (HDF4_BUILD_TOOLS)
ARGS -E copy_if_different ${HDF4_HDF_UTIL_SOURCE_DIR}/testfiles/${h4_file} ${dest}
)
endforeach (h4_file ${HDF4_JPEG2HDF_TEST_FILES})
-
+
##############################################################################
##############################################################################
### T H E T E S T S M A C R O S ###
@@ -337,7 +337,7 @@ endif (HDF4_BUILD_TOOLS)
add_test (
NAME hdfgif-clear-refs
COMMAND ${CMAKE_COMMAND}
- -E remove
+ -E remove
skull.gif
SunWheel.hdf
bttrfly.hdf
@@ -377,7 +377,7 @@ endif (HDF4_BUILD_TOOLS)
add_test (
NAME hdfpack-clear-refs
COMMAND ${CMAKE_COMMAND}
- -E remove
+ -E remove
test.pck
test.blk
test.hdf.tmp
@@ -403,7 +403,7 @@ endif (HDF4_BUILD_TOOLS)
add_test (
NAME hdfpalette-clear-refs
COMMAND ${CMAKE_COMMAND}
- -E remove
+ -E remove
pal001
pal005
palette.hdf
@@ -457,7 +457,7 @@ endif (HDF4_BUILD_TOOLS)
add_test (
NAME hdfjpeg-clear-refs
COMMAND ${CMAKE_COMMAND}
- -E remove
+ -E remove
jpeg.hdf
jpeg.hdf.tmp
jpeg.hdf.tmp.err
diff --git a/hdf/util/Makefile.in b/hdf/util/Makefile.in
index 39fac97..ce45357 100644
--- a/hdf/util/Makefile.in
+++ b/hdf/util/Makefile.in
@@ -97,7 +97,20 @@ bin_PROGRAMS = gif2hdf$(EXEEXT) hdf2gif$(EXEEXT) hdf2jpeg$(EXEEXT) \
TESTS = $(TEST_SCRIPT)
subdir = hdf/util
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
-am__aclocal_m4_deps = $(top_srcdir)/configure.ac
+am__aclocal_m4_deps = $(top_srcdir)/m4/ax_check_class.m4 \
+ $(top_srcdir)/m4/ax_check_classpath.m4 \
+ $(top_srcdir)/m4/ax_check_java_home.m4 \
+ $(top_srcdir)/m4/ax_check_junit.m4 \
+ $(top_srcdir)/m4/ax_java_options.m4 \
+ $(top_srcdir)/m4/ax_jni_include_dir.m4 \
+ $(top_srcdir)/m4/ax_prog_jar.m4 \
+ $(top_srcdir)/m4/ax_prog_java.m4 \
+ $(top_srcdir)/m4/ax_prog_java_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javac.m4 \
+ $(top_srcdir)/m4/ax_prog_javac_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javadoc.m4 \
+ $(top_srcdir)/m4/ax_try_compile_java.m4 \
+ $(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
mkinstalldirs = $(SHELL) $(top_srcdir)/bin/mkinstalldirs
@@ -517,12 +530,27 @@ FFLAGS = @FFLAGS@
FGREP = @FGREP@
FLIBS = @FLIBS@
GREP = @GREP@
+H4_CLASSPATH = @H4_CLASSPATH@
+H4_JAVACFLAGS = @H4_JAVACFLAGS@
+H4_JAVAFLAGS = @H4_JAVAFLAGS@
H4_VERSION = @H4_VERSION@
+HDF_JAVA = @HDF_JAVA@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
INSTALL_PROGRAM = @INSTALL_PROGRAM@
INSTALL_SCRIPT = @INSTALL_SCRIPT@
INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+JAR = @JAR@
+JAVA = @JAVA@
+JAVAC = @JAVAC@
+JAVACFLAGS = @JAVACFLAGS@
+JAVADOC = @JAVADOC@
+JAVAFLAGS = @JAVAFLAGS@
+JAVAPREFIX = @JAVAPREFIX@
+JAVA_JUNIT = @JAVA_JUNIT@
+JAVA_PATH_NAME = @JAVA_PATH_NAME@
+JNIFLAGS = @JNIFLAGS@
+JUNIT = @JUNIT@
LD = @LD@
LDFLAGS = @LDFLAGS@
LEX = @LEX@
@@ -565,11 +593,14 @@ STRIP = @STRIP@
SZIP_HAS_ENCODER = @SZIP_HAS_ENCODER@
SZIP_INFO = @SZIP_INFO@
TBL = @TBL@
+TESTS_JUNIT = @TESTS_JUNIT@
TEST_FORTRAN_NETCDF = @TEST_FORTRAN_NETCDF@
UNAME_INFO = @UNAME_INFO@
USE_COMP_SZIP = @USE_COMP_SZIP@
+UUDECODE = @UUDECODE@
VERSION = @VERSION@
YACC = @YACC@
+_ACJNI_JAVAC = @_ACJNI_JAVAC@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/hdf/util/gif2hdf.c b/hdf/util/gif2hdf.c
index 59175f6..54fbfa3 100644
--- a/hdf/util/gif2hdf.c
+++ b/hdf/util/gif2hdf.c
@@ -50,8 +50,8 @@ char *argc[];
- strncpy(GIFFileName , argc[1] , VSNAMELENMAX - 1);
- strncpy(HDFFileName , argc[2] , VSNAMELENMAX - 1);
+ HDstrncpy(GIFFileName , argc[1] , VSNAMELENMAX - 1);
+ HDstrncpy(HDFFileName , argc[2] , VSNAMELENMAX - 1);
GIFFileName[VSNAMELENMAX - 1] = '\0';
HDFFileName[VSNAMELENMAX - 1] = '\0';
@@ -102,18 +102,18 @@ char *argc[];
{
gifImageDesc = *(GifMemoryStruct.GifImageDesc[i]);
if (gifImageDesc.Image != NULL)
- free(gifImageDesc.Image);
+ HDfree(gifImageDesc.Image);
if (GifMemoryStruct.GifGraphicControlExtension[i] != NULL)
- free(GifMemoryStruct.GifGraphicControlExtension[i]);
+ HDfree(GifMemoryStruct.GifGraphicControlExtension[i]);
}
- free(StartPos);
+ HDfree(StartPos);
- free (GifMemoryStruct.GifHeader);
+ HDfree (GifMemoryStruct.GifHeader);
if (GifMemoryStruct.GifApplicationExtension != NULL)
- free (GifMemoryStruct.GifApplicationExtension);
+ HDfree (GifMemoryStruct.GifApplicationExtension);
return (0);
}
diff --git a/hdf/util/gif2mem.c b/hdf/util/gif2mem.c
index 688ffca..610bd2d 100644
--- a/hdf/util/gif2mem.c
+++ b/hdf/util/gif2mem.c
@@ -200,7 +200,7 @@ BYTE *MemGif;
** Decompress the Image
*/
gifImageDesc[ImageCount-1]->Image = Decompress(gifImageDesc[ImageCount-1] , gifHead);
- free(gifImageDesc[ImageCount-1]->GIFImage);
+ HDfree(gifImageDesc[ImageCount-1]->GIFImage);
/*
** Convert the local palette into an HDF compatible palette
diff --git a/hdf/util/h4cc.in b/hdf/util/h4cc.in
index 4864234..30b3c08 100644
--- a/hdf/util/h4cc.in
+++ b/hdf/util/h4cc.in
@@ -113,7 +113,7 @@ for arg in $@ ; do
compile_args="$compile_args $arg"
if test "x$do_link" = "xyes" -a -n "$output_file"; then
- compile_args="$compile_args -o $outputfile"
+ compile_args="$compile_args -o $output_file"
fi
do_link="no"
diff --git a/hdf/util/h4fc.in b/hdf/util/h4fc.in
index 2eb5929..ae893b1 100644
--- a/hdf/util/h4fc.in
+++ b/hdf/util/h4fc.in
@@ -95,7 +95,7 @@ for arg in $@ ; do
compile_args="$compile_args $arg"
if test "x$do_link" = "xyes" -a -n "$output_file"; then
- compile_args="$compile_args -o $outputfile"
+ compile_args="$compile_args -o $output_file"
fi
do_link="no"
diff --git a/hdf/util/hdf24to8.c b/hdf/util/hdf24to8.c
index 823c25a..179222b 100644
--- a/hdf/util/hdf24to8.c
+++ b/hdf/util/hdf24to8.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6013 $";
-#endif
-
-/* $Id: hdf24to8.c 6013 2014-01-10 21:19:02Z acheng $ */
+/* $Id: hdf24to8.c 6357 2016-05-13 05:00:06Z bmribler $ */
/**************************************************************************
* hdf24hdf8 Quantizes a HDF RGB 24 bit "pixel" image into a 8 bit image
diff --git a/hdf/util/hdf2jpeg.c b/hdf/util/hdf2jpeg.c
index 683be7a..97e6ac5 100644
--- a/hdf/util/hdf2jpeg.c
+++ b/hdf/util/hdf2jpeg.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6013 $";
-#endif
-
-/* $Id: hdf2jpeg.c 6013 2014-01-10 21:19:02Z acheng $ */
+/* $Id: hdf2jpeg.c 6357 2016-05-13 05:00:06Z bmribler $ */
#include "hdf.h"
#include "hfile.h"
diff --git a/hdf/util/hdf8to24.c b/hdf/util/hdf8to24.c
index fe156cc..7c154a5 100644
--- a/hdf/util/hdf8to24.c
+++ b/hdf/util/hdf8to24.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6013 $";
-#endif
-
-/* $Id: hdf8to24.c 6013 2014-01-10 21:19:02Z acheng $ */
+/* $Id: hdf8to24.c 6357 2016-05-13 05:00:06Z bmribler $ */
#include "hdf.h"
#ifndef I860
#include <stdlib.h>
diff --git a/hdf/util/hdfcomp.c b/hdf/util/hdfcomp.c
index ea69982..3827710 100644
--- a/hdf/util/hdfcomp.c
+++ b/hdf/util/hdfcomp.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "$Revision: 6013 $";
-#endif
-
-/* $Id: hdfcomp.c 6013 2014-01-10 21:19:02Z acheng $ */
+/* $Id: hdfcomp.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*
* hdfcomp.c
diff --git a/hdf/util/hdfls.c b/hdf/util/hdfls.c
index 08f8447..673abbf 100644
--- a/hdf/util/hdfls.c
+++ b/hdf/util/hdfls.c
@@ -12,11 +12,7 @@
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6013 $";
-#endif
-
-/* $Id: hdfls.c 6013 2014-01-10 21:19:02Z acheng $ */
+/* $Id: hdfls.c 6357 2016-05-13 05:00:06Z bmribler $ */
#include "hdf.h"
#include "hfile.h"
diff --git a/hdf/util/hdfpack.c b/hdf/util/hdfpack.c
index dbe6197..c9571c5 100644
--- a/hdf/util/hdfpack.c
+++ b/hdf/util/hdfpack.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6013 $";
-#endif
-
-/* $Id: hdfpack.c 6013 2014-01-10 21:19:02Z acheng $ */
+/* $Id: hdfpack.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*
** FILE
diff --git a/hdf/util/hdftopal.c b/hdf/util/hdftopal.c
index 9115103..6b695d4 100644
--- a/hdf/util/hdftopal.c
+++ b/hdf/util/hdftopal.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6013 $";
-#endif
-
-/* $Id: hdftopal.c 6013 2014-01-10 21:19:02Z acheng $ */
+/* $Id: hdftopal.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*
* hdftopal.c
diff --git a/hdf/util/hdftor8.c b/hdf/util/hdftor8.c
index 3855a91..45ff6bf 100644
--- a/hdf/util/hdftor8.c
+++ b/hdf/util/hdftor8.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6013 $";
-#endif
-
-/* $Id: hdftor8.c 6013 2014-01-10 21:19:02Z acheng $ */
+/* $Id: hdftor8.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*
* hdftor8.c
diff --git a/hdf/util/hdfunpac.c b/hdf/util/hdfunpac.c
index 07a79fc..bca26fb 100644
--- a/hdf/util/hdfunpac.c
+++ b/hdf/util/hdfunpac.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6013 $";
-#endif
-
-/* $Id: hdfunpac.c 6013 2014-01-10 21:19:02Z acheng $ */
+/* $Id: hdfunpac.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*
** FILE
diff --git a/hdf/util/he_cntrl.c b/hdf/util/he_cntrl.c
index 0b6e49d..ae0524c 100644
--- a/hdf/util/he_cntrl.c
+++ b/hdf/util/he_cntrl.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4937 $";
-#endif
-
-/* $Id: he_cntrl.c 4937 2007-09-10 16:36:16Z pvn $ */
+/* $Id: he_cntrl.c 6357 2016-05-13 05:00:06Z bmribler $ */
/* ------ he-cntrl.c ------
This file contains much of the contol mechanisms for HDFed
diff --git a/hdf/util/he_disp.c b/hdf/util/he_disp.c
index 98da557..132f438 100644
--- a/hdf/util/he_disp.c
+++ b/hdf/util/he_disp.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 4937 $";
-#endif
-
-/* $Id: he_disp.c 4937 2007-09-10 16:36:16Z pvn $ */
+/* $Id: he_disp.c 6357 2016-05-13 05:00:06Z bmribler $ */
/* display.c -- contains code for displaying an image using ICR
* this code is plucked from hdfrseq.c
diff --git a/hdf/util/he_file.c b/hdf/util/he_file.c
index a2a1aff..49ca896 100644
--- a/hdf/util/he_file.c
+++ b/hdf/util/he_file.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6043 $";
-#endif
-
-/* $Id: he_file.c 6043 2014-01-21 21:09:03Z acheng $ */
+/* $Id: he_file.c 6357 2016-05-13 05:00:06Z bmribler $ */
/* --- he-file.c --- file and annotation manipulation routines */
#include "he.h"
diff --git a/hdf/util/he_main.c b/hdf/util/he_main.c
index 74e40bc..237cda2 100644
--- a/hdf/util/he_main.c
+++ b/hdf/util/he_main.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6036 $";
-#endif
-
-/* $Id: he_main.c 6036 2014-01-20 17:28:01Z acheng $ */
+/* $Id: he_main.c 6357 2016-05-13 05:00:06Z bmribler $ */
/******************************************************************************
* he - HDF editor
diff --git a/hdf/util/jpeg2hdf.c b/hdf/util/jpeg2hdf.c
index 802d885..051b5f6 100644
--- a/hdf/util/jpeg2hdf.c
+++ b/hdf/util/jpeg2hdf.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char *RcsId[] = "@(#)$Revision: 6013 $";
-#endif
-
-/* $Id: jpeg2hdf.c 6013 2014-01-10 21:19:02Z acheng $ */
+/* $Id: jpeg2hdf.c 6357 2016-05-13 05:00:06Z bmribler $ */
#include "hdf.h"
#ifndef I860
#include <stdlib.h>
diff --git a/hdf/util/paltohdf.c b/hdf/util/paltohdf.c
index fa99c7e..da7102b 100644
--- a/hdf/util/paltohdf.c
+++ b/hdf/util/paltohdf.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#) $Revision: 6013 $"
-#endif
-
-/* $Id: paltohdf.c 6013 2014-01-10 21:19:02Z acheng $ */
+/* $Id: paltohdf.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*
* paltohdf.c
diff --git a/hdf/util/r8tohdf.c b/hdf/util/r8tohdf.c
index 8d6bb69..eefd3d4 100644
--- a/hdf/util/r8tohdf.c
+++ b/hdf/util/r8tohdf.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6013 $";
-#endif
-
-/* $Id: r8tohdf.c 6013 2014-01-10 21:19:02Z acheng $ */
+/* $Id: r8tohdf.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*
* r8tohdf.c
diff --git a/hdf/util/ristosds.c b/hdf/util/ristosds.c
index ddb1cf2..be2d47b 100644
--- a/hdf/util/ristosds.c
+++ b/hdf/util/ristosds.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6013 $";
-#endif
-
-/* $Id: ristosds.c 6013 2014-01-10 21:19:02Z acheng $ */
+/* $Id: ristosds.c 6357 2016-05-13 05:00:06Z bmribler $ */
/* This program converts a series raster image hdf files into */
/* a single 3D sds hdf file. Each ris hdf file contains one */
diff --git a/hdf/util/vmake.c b/hdf/util/vmake.c
index 01e34bf..69cbe07 100644
--- a/hdf/util/vmake.c
+++ b/hdf/util/vmake.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char *RcsId[] = "@(#)$Revision: 6013 $";
-#endif
-
-/* $Id: vmake.c 6013 2014-01-10 21:19:02Z acheng $ */
+/* $Id: vmake.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*****************************************************************************
*
diff --git a/hdf/util/vshow.c b/hdf/util/vshow.c
index bf7aea5..74a28b3 100644
--- a/hdf/util/vshow.c
+++ b/hdf/util/vshow.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char *RcsId[] = "@(#)$Revision: 6043 $";
-#endif
-
-/* $Id: vshow.c 6043 2014-01-21 21:09:03Z acheng $ */
+/* $Id: vshow.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*****************************************************************************
*
diff --git a/hdf/util/writehdf.c b/hdf/util/writehdf.c
index 82b58e1..39fecef 100644
--- a/hdf/util/writehdf.c
+++ b/hdf/util/writehdf.c
@@ -105,9 +105,9 @@ char *GIFFileName;
for (i = 0 ; i < CommentCount ; i++) {
sprintf(CommentName , "Comment Extension Data %d" , (int)i);
status = Vsetattr (vgroup_id , CommentName , DFNT_CHAR8 , (int32)(GifMemoryStruct.GifCommentExtension[i])->DataSize , (VOIDP)(GifMemoryStruct.GifCommentExtension[i])->CommentData);
- free(GifMemoryStruct.GifCommentExtension[i]);
+ HDfree(GifMemoryStruct.GifCommentExtension[i]);
}
- free(GifMemoryStruct.GifCommentExtension);
+ HDfree(GifMemoryStruct.GifCommentExtension);
for (i = 0 ; i < ApplicationCount ; i++) {
sprintf(ApplicationName , "Application Extension Data %d", (int)i);
diff --git a/java/CMakeLists.txt b/java/CMakeLists.txt
new file mode 100644
index 0000000..fb0ab1c
--- /dev/null
+++ b/java/CMakeLists.txt
@@ -0,0 +1,75 @@
+cmake_minimum_required(VERSION 3.1.0)
+PROJECT ( HDF4_JAVA C Java )
+
+set (CMAKE_MODULE_PATH "${HDF_RESOURCES_DIR};${HDF_RESOURCES_EXT_DIR}")
+find_package (Java)
+#-----------------------------------------------------------------------------
+# Include some macros for reusable code
+#-----------------------------------------------------------------------------
+include (${HDF_RESOURCES_DIR}/UseJava.cmake)
+
+message (STATUS "JAVA: JAVA_HOME=$ENV{JAVA_HOME} JAVA_ROOT=$ENV{JAVA_ROOT}")
+find_package (JNI)
+
+INCLUDE_DIRECTORIES ( ${JNI_INCLUDE_DIRS} )
+
+if (WIN32)
+ set (HDF_JRE_DIRECTORY "C:/Program Files/Java/jre8")
+else (WIN32)
+ set (HDF_JRE_DIRECTORY "/usr/lib/jvm/jre")
+endif (WIN32)
+
+#-----------------------------------------------------------------------------
+# Include the main src and config directories
+#-----------------------------------------------------------------------------
+set (HDF4_JAVA_INCLUDE_DIRECTORIES
+ ${HDF4_JAVA_JNI_SRC_DIR}
+ ${JAVA_INCLUDE_PATH}
+ ${JAVA_INCLUDE_PATH2}
+)
+INCLUDE_DIRECTORIES (${HDF4_JAVA_INCLUDE_DIRECTORIES})
+set (CMAKE_JAVA_INCLUDE_PATH "")
+
+
+#-----------------------------------------------------------------------------
+# Traverse source subdirectory
+#-----------------------------------------------------------------------------
+add_subdirectory (${HDF4_JAVA_SOURCE_DIR}/src ${HDF4_JAVA_BINARY_DIR}/src)
+
+#-----------------------------------------------------------------------------
+# Build the Java Examples
+#-----------------------------------------------------------------------------
+if (HDF4_BUILD_EXAMPLES)
+ add_subdirectory (${HDF4_JAVA_SOURCE_DIR}/examples ${HDF4_JAVA_BINARY_DIR}/examples)
+endif (HDF4_BUILD_EXAMPLES)
+
+#-----------------------------------------------------------------------------
+# Testing
+#-----------------------------------------------------------------------------
+if (BUILD_TESTING)
+ add_subdirectory (${HDF4_JAVA_SOURCE_DIR}/test ${HDF4_JAVA_BINARY_DIR}/test)
+endif (BUILD_TESTING)
+
+#-----------------------------------------------------------------------------
+# Add Required Jar(s)
+#-----------------------------------------------------------------------------
+install (
+ FILES
+ ${HDF4_JAVA_LOGGING_JAR}
+ ${HDF4_JAVA_LOGGING_NOP_JAR}
+ ${HDF4_JAVA_LOGGING_SIMPLE_JAR}
+ DESTINATION ${HDF4_INSTALL_JAR_DIR}
+ COMPONENT libraries
+)
+
+#-----------------------------------------------------------------------------
+# Option to include jre
+#-----------------------------------------------------------------------------
+option (HDF4_JAVA_PACK_JRE "Package a JRE installer directory" OFF)
+if (HDF4_JAVA_PACK_JRE)
+ install (
+ DIRECTORY ${HDF_JRE_DIRECTORY}
+ DESTINATION ${HDF4_INSTALL_BIN_DIR}
+ USE_SOURCE_PERMISSIONS
+ )
+endif (HDF4_JAVA_PACK_JRE)
diff --git a/java/Makefile.am b/java/Makefile.am
new file mode 100644
index 0000000..8588d8d
--- /dev/null
+++ b/java/Makefile.am
@@ -0,0 +1,16 @@
+#
+# HDF Java native interface (JNI) Library Makefile(.in)
+
+include $(top_srcdir)/config/commence.am
+
+## Only recurse into subdirectories if the Java (JNI) interface is enabled.
+if BUILD_JAVA_CONDITIONAL
+
+# Mark this directory as part of the JNI API
+JAVA_API=yes
+
+SUBDIRS=src test examples
+
+endif
+
+include $(top_srcdir)/config/conclude.am
diff --git a/mfhdf/fortran/Makefile.in b/java/Makefile.in
similarity index 80%
copy from mfhdf/fortran/Makefile.in
copy to java/Makefile.in
index fe1d7fd..842dd98 100644
--- a/mfhdf/fortran/Makefile.in
+++ b/java/Makefile.in
@@ -14,9 +14,8 @@
@SET_MAKE@
-#############################################################################
-#############################################################################
-
+#
+# HDF Java native interface (JNI) Library Makefile(.in)
VPATH = @srcdir@
am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)'
am__make_running_with_option = \
@@ -84,38 +83,30 @@ host_triplet = @host@
DIST_COMMON = $(top_srcdir)/config/commence.am \
$(top_srcdir)/config/conclude.am $(srcdir)/Makefile.in \
$(srcdir)/Makefile.am $(top_srcdir)/bin/mkinstalldirs \
- $(srcdir)/ftest.f.in $(srcdir)/jackets.c.in \
- $(srcdir)/netcdf.inc.in $(srcdir)/testfortran.sh.in \
- $(am__include_HEADERS_DIST) $(top_srcdir)/bin/test-driver
-@HDF_BUILD_NETCDF_FALSE@check_PROGRAMS = hdftest$(EXEEXT) \
-@HDF_BUILD_NETCDF_FALSE@ tszip$(EXEEXT) hdftest1$(EXEEXT)
-@HDF_BUILD_NETCDF_TRUE@check_PROGRAMS = ftest$(EXEEXT) \
-@HDF_BUILD_NETCDF_TRUE@ hdftest$(EXEEXT) tszip$(EXEEXT) \
-@HDF_BUILD_NETCDF_TRUE@ hdftest1$(EXEEXT)
-TESTS = $(TEST_SCRIPT)
-subdir = mfhdf/fortran
-SUBDIRS =
+ $(top_srcdir)/bin/test-driver
+TESTS =
+subdir = java
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
-am__aclocal_m4_deps = $(top_srcdir)/configure.ac
+am__aclocal_m4_deps = $(top_srcdir)/m4/ax_check_class.m4 \
+ $(top_srcdir)/m4/ax_check_classpath.m4 \
+ $(top_srcdir)/m4/ax_check_java_home.m4 \
+ $(top_srcdir)/m4/ax_check_junit.m4 \
+ $(top_srcdir)/m4/ax_java_options.m4 \
+ $(top_srcdir)/m4/ax_jni_include_dir.m4 \
+ $(top_srcdir)/m4/ax_prog_jar.m4 \
+ $(top_srcdir)/m4/ax_prog_java.m4 \
+ $(top_srcdir)/m4/ax_prog_java_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javac.m4 \
+ $(top_srcdir)/m4/ax_prog_javac_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javadoc.m4 \
+ $(top_srcdir)/m4/ax_try_compile_java.m4 \
+ $(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
mkinstalldirs = $(SHELL) $(top_srcdir)/bin/mkinstalldirs
CONFIG_HEADER = $(top_builddir)/hdf/src/h4config.h
-CONFIG_CLEAN_FILES = ftest.f jackets.c netcdf.inc testfortran.sh
+CONFIG_CLEAN_FILES =
CONFIG_CLEAN_VPATH_FILES =
-am__ftest_SOURCES_DIST = ftest.f
-@HDF_BUILD_NETCDF_TRUE@am_ftest_OBJECTS = ftest.$(OBJEXT)
-ftest_OBJECTS = $(am_ftest_OBJECTS)
-AM_V_lt = $(am__v_lt_@AM_V@)
-am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@)
-am__v_lt_0 = --silent
-am__v_lt_1 =
-am_hdftest_OBJECTS = hdftest.$(OBJEXT)
-hdftest_OBJECTS = $(am_hdftest_OBJECTS)
-am_hdftest1_OBJECTS = hdftest1.$(OBJEXT)
-hdftest1_OBJECTS = $(am_hdftest1_OBJECTS)
-am_tszip_OBJECTS = tszip.$(OBJEXT)
-tszip_OBJECTS = $(am_tszip_OBJECTS)
AM_V_P = $(am__v_P_@AM_V@)
am__v_P_ = $(am__v_P_@AM_DEFAULT_V@)
am__v_P_0 = false
@@ -128,26 +119,8 @@ AM_V_at = $(am__v_at_@AM_V@)
am__v_at_ = $(am__v_at_@AM_DEFAULT_V@)
am__v_at_0 = @
am__v_at_1 =
-DEFAULT_INCLUDES = -I. at am__isrc@ -I$(top_builddir)/hdf/src
-F77COMPILE = $(F77) $(AM_FFLAGS) $(FFLAGS)
-LTF77COMPILE = $(LIBTOOL) $(AM_V_lt) --tag=F77 $(AM_LIBTOOLFLAGS) \
- $(LIBTOOLFLAGS) --mode=compile $(F77) $(AM_FFLAGS) $(FFLAGS)
-AM_V_F77 = $(am__v_F77_@AM_V@)
-am__v_F77_ = $(am__v_F77_@AM_DEFAULT_V@)
-am__v_F77_0 = @echo " F77 " $@;
-am__v_F77_1 =
-F77LD = $(F77)
-F77LINK = $(LIBTOOL) $(AM_V_lt) --tag=F77 $(AM_LIBTOOLFLAGS) \
- $(LIBTOOLFLAGS) --mode=link $(F77LD) $(AM_FFLAGS) $(FFLAGS) \
- $(AM_LDFLAGS) $(LDFLAGS) -o $@
-AM_V_F77LD = $(am__v_F77LD_@AM_V@)
-am__v_F77LD_ = $(am__v_F77LD_@AM_DEFAULT_V@)
-am__v_F77LD_0 = @echo " F77LD " $@;
-am__v_F77LD_1 =
-SOURCES = $(ftest_SOURCES) $(hdftest_SOURCES) $(hdftest1_SOURCES) \
- $(tszip_SOURCES)
-DIST_SOURCES = $(am__ftest_SOURCES_DIST) $(hdftest_SOURCES) \
- $(hdftest1_SOURCES) $(tszip_SOURCES)
+SOURCES =
+DIST_SOURCES =
RECURSIVE_TARGETS = all-recursive check-recursive cscopelist-recursive \
ctags-recursive dvi-recursive html-recursive info-recursive \
install-data-recursive install-dvi-recursive \
@@ -161,36 +134,6 @@ am__can_run_installinfo = \
n|no|NO) false;; \
*) (install-info --version) >/dev/null 2>&1;; \
esac
-am__include_HEADERS_DIST = mffunc.inc mffunc.f90 netcdf.inc netcdf.f90
-am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`;
-am__vpath_adj = case $$p in \
- $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \
- *) f=$$p;; \
- esac;
-am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`;
-am__install_max = 40
-am__nobase_strip_setup = \
- srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'`
-am__nobase_strip = \
- for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||"
-am__nobase_list = $(am__nobase_strip_setup); \
- for p in $$list; do echo "$$p $$p"; done | \
- sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \
- $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \
- if (++n[$$2] == $(am__install_max)) \
- { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \
- END { for (dir in files) print dir, files[dir] }'
-am__base_list = \
- sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \
- sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g'
-am__uninstall_files_from_dir = { \
- test -z "$$files" \
- || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \
- || { echo " ( cd '$$dir' && rm -f" $$files ")"; \
- $(am__cd) "$$dir" && rm -f $$files; }; \
- }
-am__installdirs = "$(DESTDIR)$(includedir)"
-HEADERS = $(include_HEADERS)
RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \
distclean-recursive maintainer-clean-recursive
am__recursive_targets = \
@@ -240,6 +183,33 @@ am__tty_colors = { \
std='[m'; \
fi; \
}
+am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`;
+am__vpath_adj = case $$p in \
+ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \
+ *) f=$$p;; \
+ esac;
+am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`;
+am__install_max = 40
+am__nobase_strip_setup = \
+ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'`
+am__nobase_strip = \
+ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||"
+am__nobase_list = $(am__nobase_strip_setup); \
+ for p in $$list; do echo "$$p $$p"; done | \
+ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \
+ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \
+ if (++n[$$2] == $(am__install_max)) \
+ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \
+ END { for (dir in files) print dir, files[dir] }'
+am__base_list = \
+ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \
+ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g'
+am__uninstall_files_from_dir = { \
+ test -z "$$files" \
+ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \
+ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \
+ $(am__cd) "$$dir" && rm -f $$files; }; \
+ }
am__recheck_rx = ^[ ]*:recheck:[ ]*
am__global_test_result_rx = ^[ ]*:global-test-result:[ ]*
am__copy_in_global_log_rx = ^[ ]*:copy-in-global-log:[ ]*
@@ -390,6 +360,7 @@ am__set_b = \
*) \
b='$*';; \
esac
+DIST_SUBDIRS = src test examples
DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
am__relativize = \
dir0=`pwd`; \
@@ -458,12 +429,27 @@ FFLAGS = @FFLAGS@
FGREP = @FGREP@
FLIBS = @FLIBS@
GREP = @GREP@
+H4_CLASSPATH = @H4_CLASSPATH@
+H4_JAVACFLAGS = @H4_JAVACFLAGS@
+H4_JAVAFLAGS = @H4_JAVAFLAGS@
H4_VERSION = @H4_VERSION@
+HDF_JAVA = @HDF_JAVA@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
INSTALL_PROGRAM = @INSTALL_PROGRAM@
INSTALL_SCRIPT = @INSTALL_SCRIPT@
INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+JAR = @JAR@
+JAVA = @JAVA@
+JAVAC = @JAVAC@
+JAVACFLAGS = @JAVACFLAGS@
+JAVADOC = @JAVADOC@
+JAVAFLAGS = @JAVAFLAGS@
+JAVAPREFIX = @JAVAPREFIX@
+JAVA_JUNIT = @JAVA_JUNIT@
+JAVA_PATH_NAME = @JAVA_PATH_NAME@
+JNIFLAGS = @JNIFLAGS@
+JUNIT = @JUNIT@
LD = @LD@
LDFLAGS = @LDFLAGS@
LEX = @LEX@
@@ -506,11 +492,14 @@ STRIP = @STRIP@
SZIP_HAS_ENCODER = @SZIP_HAS_ENCODER@
SZIP_INFO = @SZIP_INFO@
TBL = @TBL@
+TESTS_JUNIT = @TESTS_JUNIT@
TEST_FORTRAN_NETCDF = @TEST_FORTRAN_NETCDF@
UNAME_INFO = @UNAME_INFO@
USE_COMP_SZIP = @USE_COMP_SZIP@
+UUDECODE = @UUDECODE@
VERSION = @VERSION@
YACC = @YACC@
+_ACJNI_JAVAC = @_ACJNI_JAVAC@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
@@ -583,44 +572,11 @@ ACLOCAL_AMFLAGS = "-I m4"
# .chkexe files are used to mark tests that have run successfully.
# .chklog files are output from those tests.
+CHECK_CLEANFILES = *.chkexe *.chklog
-#############################################################################
-#############################################################################
-CHECK_CLEANFILES = *.chkexe *.chklog test.nc copy.nc *.hdf \
- testdir/testext.hdf
-fort_INCLUDES = -I$(top_srcdir)/hdf/src \
- -I$(top_srcdir)/hdf/test \
- -I$(top_srcdir)/mfhdf/libsrc \
- -I$(top_builddir)/mfhdf/libsrc
-
-DEFINES = -DNDEBUG -DHDF
-AM_CPPFLAGS = $(fort_INCLUDES) $(DEFINES)
-DIST_SUBDIRS = examples
-@HDF_BUILD_NETCDF_FALSE@include_HEADERS = mffunc.inc mffunc.f90
-
-#############################################################################
-#############################################################################
-@HDF_BUILD_NETCDF_TRUE@include_HEADERS = netcdf.inc netcdf.f90 mffunc.inc mffunc.f90
-TEST_SCRIPT = testfortran.sh
-check_SCRIPTS = testfortran.sh
-@HDF_BUILD_NETCDF_TRUE@ftest_SOURCES = ftest.f
-@HDF_BUILD_NETCDF_TRUE@ftest_LDADD = $(top_builddir)/hdf/test/forsupff.o $(top_builddir)/hdf/test/forsupf.o \
-@HDF_BUILD_NETCDF_TRUE@ $(LIBMFHDF) $(LIBHDF) @LIBS@
-
-@HDF_BUILD_NETCDF_TRUE@ftest_DEPENDENCIES = $(top_builddir)/hdf/test/forsupff.o $(top_builddir)/hdf/test/forsupf.o \
-@HDF_BUILD_NETCDF_TRUE@ $(LIBMFHDF) $(LIBHDF)
-
-hdftest_SOURCES = hdftest.f
-hdftest_LDADD = $(LIBMFHDF) $(LIBHDF) @LIBS@
-hdftest_DEPENDENCIES = testdir $(LIBMFHDF) $(LIBHDF)
-hdftest1_SOURCES = hdftest1.f
-hdftest1_LDADD = $(LIBMFHDF) $(LIBHDF) @LIBS@
-hdftest1_DEPENDENCIES = testdir $(LIBMFHDF) $(LIBHDF)
-tszip_SOURCES = tszip.f
-tszip_LDADD = $(LIBMFHDF) $(LIBHDF) @LIBS@
-tszip_DEPENDENCIES = testdir $(LIBMFHDF) $(LIBHDF)
-SUFFIXES = .inc .f90
-DISTCLEANFILES = ftest.f jackets.c netcdf.inc mffunc.f90 netcdf.f90
+# Mark this directory as part of the JNI API
+@BUILD_JAVA_CONDITIONAL_TRUE@JAVA_API = yes
+@BUILD_JAVA_CONDITIONAL_TRUE@SUBDIRS = src test examples
# Automake needs to be taught how to build lib, progs, and tests targets.
# These will be filled in automatically for the most part (e.g.,
@@ -642,7 +598,7 @@ TEST_SCRIPT_CHKSH = $(TEST_SCRIPT:=.chkexe_)
all: all-recursive
.SUFFIXES:
-.SUFFIXES: .inc .f90 .f .lo .log .o .obj .sh .sh$(EXEEXT) .trs
+.SUFFIXES: .log .sh .sh$(EXEEXT) .trs
$(srcdir)/Makefile.in: @MAINTAINER_MODE_TRUE@ $(srcdir)/Makefile.am $(top_srcdir)/config/commence.am $(top_srcdir)/config/conclude.am $(am__configure_deps)
@for dep in $?; do \
case '$(am__configure_deps)' in \
@@ -652,9 +608,9 @@ $(srcdir)/Makefile.in: @MAINTAINER_MODE_TRUE@ $(srcdir)/Makefile.am $(top_srcdir
exit 1;; \
esac; \
done; \
- echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign mfhdf/fortran/Makefile'; \
+ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign java/Makefile'; \
$(am__cd) $(top_srcdir) && \
- $(AUTOMAKE) --foreign mfhdf/fortran/Makefile
+ $(AUTOMAKE) --foreign java/Makefile
.PRECIOUS: Makefile
Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
@case '$?' in \
@@ -674,81 +630,12 @@ $(top_srcdir)/configure: @MAINTAINER_MODE_TRUE@ $(am__configure_deps)
$(ACLOCAL_M4): @MAINTAINER_MODE_TRUE@ $(am__aclocal_m4_deps)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(am__aclocal_m4_deps):
-ftest.f: $(top_builddir)/config.status $(srcdir)/ftest.f.in
- cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@
-jackets.c: $(top_builddir)/config.status $(srcdir)/jackets.c.in
- cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@
-netcdf.inc: $(top_builddir)/config.status $(srcdir)/netcdf.inc.in
- cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@
-testfortran.sh: $(top_builddir)/config.status $(srcdir)/testfortran.sh.in
- cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@
-
-clean-checkPROGRAMS:
- @list='$(check_PROGRAMS)'; test -n "$$list" || exit 0; \
- echo " rm -f" $$list; \
- rm -f $$list || exit $$?; \
- test -n "$(EXEEXT)" || exit 0; \
- list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \
- echo " rm -f" $$list; \
- rm -f $$list
-
-ftest$(EXEEXT): $(ftest_OBJECTS) $(ftest_DEPENDENCIES) $(EXTRA_ftest_DEPENDENCIES)
- @rm -f ftest$(EXEEXT)
- $(AM_V_F77LD)$(F77LINK) $(ftest_OBJECTS) $(ftest_LDADD) $(LIBS)
-
-hdftest$(EXEEXT): $(hdftest_OBJECTS) $(hdftest_DEPENDENCIES) $(EXTRA_hdftest_DEPENDENCIES)
- @rm -f hdftest$(EXEEXT)
- $(AM_V_F77LD)$(F77LINK) $(hdftest_OBJECTS) $(hdftest_LDADD) $(LIBS)
-
-hdftest1$(EXEEXT): $(hdftest1_OBJECTS) $(hdftest1_DEPENDENCIES) $(EXTRA_hdftest1_DEPENDENCIES)
- @rm -f hdftest1$(EXEEXT)
- $(AM_V_F77LD)$(F77LINK) $(hdftest1_OBJECTS) $(hdftest1_LDADD) $(LIBS)
-
-tszip$(EXEEXT): $(tszip_OBJECTS) $(tszip_DEPENDENCIES) $(EXTRA_tszip_DEPENDENCIES)
- @rm -f tszip$(EXEEXT)
- $(AM_V_F77LD)$(F77LINK) $(tszip_OBJECTS) $(tszip_LDADD) $(LIBS)
-
-mostlyclean-compile:
- -rm -f *.$(OBJEXT)
-
-distclean-compile:
- -rm -f *.tab.c
-
-.f.o:
- $(AM_V_F77)$(F77COMPILE) -c -o $@ $<
-
-.f.obj:
- $(AM_V_F77)$(F77COMPILE) -c -o $@ `$(CYGPATH_W) '$<'`
-
-.f.lo:
- $(AM_V_F77)$(LTF77COMPILE) -c -o $@ $<
mostlyclean-libtool:
-rm -f *.lo
clean-libtool:
-rm -rf .libs _libs
-install-includeHEADERS: $(include_HEADERS)
- @$(NORMAL_INSTALL)
- @list='$(include_HEADERS)'; test -n "$(includedir)" || list=; \
- if test -n "$$list"; then \
- echo " $(MKDIR_P) '$(DESTDIR)$(includedir)'"; \
- $(MKDIR_P) "$(DESTDIR)$(includedir)" || exit 1; \
- fi; \
- for p in $$list; do \
- if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
- echo "$$d$$p"; \
- done | $(am__base_list) | \
- while read files; do \
- echo " $(INSTALL_HEADER) $$files '$(DESTDIR)$(includedir)'"; \
- $(INSTALL_HEADER) $$files "$(DESTDIR)$(includedir)" || exit $$?; \
- done
-
-uninstall-includeHEADERS:
- @$(NORMAL_UNINSTALL)
- @list='$(include_HEADERS)'; test -n "$(includedir)" || list=; \
- files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
- dir='$(DESTDIR)$(includedir)'; $(am__uninstall_files_from_dir)
# This directory's subdirectories are mostly independent; you can cd
# into them and run 'make' without going through this Makefile.
@@ -968,7 +855,7 @@ $(TEST_SUITE_LOG): $(TEST_LOGS)
echo "$$col$$br$$std"; \
fi; \
$$success || exit 1
-recheck: all $(check_PROGRAMS) $(check_SCRIPTS)
+recheck: all
@test -z "$(TEST_SUITE_LOG)" || rm -f $(TEST_SUITE_LOG)
@set +e; $(am__set_TESTS_bases); \
bases=`for i in $$bases; do echo $$i; done \
@@ -1050,15 +937,11 @@ distdir: $(DISTFILES)
fi; \
done
check-am: all-am
- $(MAKE) $(AM_MAKEFLAGS) $(check_PROGRAMS) $(check_SCRIPTS)
$(MAKE) $(AM_MAKEFLAGS) check-TESTS
check: check-recursive
-all-am: Makefile $(HEADERS) all-local
+all-am: Makefile all-local
installdirs: installdirs-recursive
installdirs-am:
- for dir in "$(DESTDIR)$(includedir)"; do \
- test -z "$$dir" || $(MKDIR_P) "$$dir"; \
- done
install: install-recursive
install-exec: install-exec-recursive
install-data: install-data-recursive
@@ -1088,20 +971,17 @@ clean-generic:
distclean-generic:
-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
-test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES)
- -test -z "$(DISTCLEANFILES)" || rm -f $(DISTCLEANFILES)
maintainer-clean-generic:
@echo "This command is intended for maintainers to use"
@echo "it deletes files that may require special tools to rebuild."
clean: clean-recursive
-clean-am: clean-checkPROGRAMS clean-generic clean-libtool \
- mostlyclean-am
+clean-am: clean-generic clean-libtool mostlyclean-am
distclean: distclean-recursive
-rm -f Makefile
-distclean-am: clean-am distclean-compile distclean-generic \
- distclean-local distclean-tags
+distclean-am: clean-am distclean-generic distclean-tags
dvi: dvi-recursive
@@ -1115,7 +995,7 @@ info: info-recursive
info-am:
-install-data-am: install-includeHEADERS
+install-data-am:
install-dvi: install-dvi-recursive
@@ -1141,7 +1021,7 @@ install-ps: install-ps-recursive
install-ps-am:
-installcheck-am: installcheck-local
+installcheck-am:
maintainer-clean: maintainer-clean-recursive
-rm -f Makefile
@@ -1149,8 +1029,8 @@ maintainer-clean-am: distclean-am maintainer-clean-generic
mostlyclean: mostlyclean-recursive
-mostlyclean-am: mostlyclean-compile mostlyclean-generic \
- mostlyclean-libtool mostlyclean-local
+mostlyclean-am: mostlyclean-generic mostlyclean-libtool \
+ mostlyclean-local
pdf: pdf-recursive
@@ -1160,55 +1040,24 @@ ps: ps-recursive
ps-am:
-uninstall-am: uninstall-includeHEADERS
+uninstall-am:
.MAKE: $(am__recursive_targets) check-am install-am install-strip
.PHONY: $(am__recursive_targets) CTAGS GTAGS TAGS all all-am all-local \
- check check-TESTS check-am clean clean-checkPROGRAMS \
- clean-generic clean-libtool cscopelist-am ctags ctags-am \
- distclean distclean-compile distclean-generic \
- distclean-libtool distclean-local distclean-tags distdir dvi \
- dvi-am html html-am info info-am install install-am \
- install-data install-data-am install-dvi install-dvi-am \
- install-exec install-exec-am install-html install-html-am \
- install-includeHEADERS install-info install-info-am \
- install-man install-pdf install-pdf-am install-ps \
- install-ps-am install-strip installcheck installcheck-am \
- installcheck-local installdirs installdirs-am maintainer-clean \
- maintainer-clean-generic mostlyclean mostlyclean-compile \
- mostlyclean-generic mostlyclean-libtool mostlyclean-local pdf \
- pdf-am ps ps-am recheck tags tags-am uninstall uninstall-am \
- uninstall-includeHEADERS
-
-
-testdir:
- -mkdir testdir
-.inc.f90:
- sed -e 's/^[cC]/!/' -e 's/^ [^ ]/ \&/' < $< > $*.f90
-
-# Recurse into examples directory for examples tests.
-installcheck-local:
- @(cd examples && $(MAKE) $(AM_MAKEFLAGS) $@) || exit 1;
-
-# 'make install-all' also installs examples
-install-all:
- @$(MAKE) $(AM_MAKEFLAGS) install
- @$(MAKE) $(AM_MAKEFLAGS) install-examples
-uninstall-all:
- @$(MAKE) $(AM_MAKEFLAGS) uninstall
- @$(MAKE) $(AM_MAKEFLAGS) uninstall-examples
-
-# Install examples recursively
-install-examples uninstall-examples:
- @@SETX@; for d in examples; do \
- (cd $$d && $(MAKE) $(AM_MAKEFLAGS) $@) || exit 1; \
- done
+ check check-TESTS check-am clean clean-generic clean-libtool \
+ cscopelist-am ctags ctags-am distclean distclean-generic \
+ distclean-libtool distclean-tags distdir dvi dvi-am html \
+ html-am info info-am install install-am install-data \
+ install-data-am install-dvi install-dvi-am install-exec \
+ install-exec-am install-html install-html-am install-info \
+ install-info-am install-man install-pdf install-pdf-am \
+ install-ps install-ps-am install-strip installcheck \
+ installcheck-am installdirs installdirs-am maintainer-clean \
+ maintainer-clean-generic mostlyclean mostlyclean-generic \
+ mostlyclean-libtool mostlyclean-local pdf pdf-am ps ps-am \
+ recheck tags tags-am uninstall uninstall-am
-# Automake's distclean won't remove directories, so we can add an additional
-# hook target which will do so during 'make distclean'.
-distclean-local:
- -rm -rf testdir
# lib/progs/tests targets recurse into subdirectories. build-* targets
# build files in this directory.
diff --git a/java/examples/CMakeLists.txt b/java/examples/CMakeLists.txt
new file mode 100644
index 0000000..e6e4afc
--- /dev/null
+++ b/java/examples/CMakeLists.txt
@@ -0,0 +1,89 @@
+cmake_minimum_required (VERSION 3.1.0)
+project (HDF4_JAVA_Examples Java)
+
+set (CMAKE_VERBOSE_MAKEFILE 1)
+
+INCLUDE_DIRECTORIES (
+ ${HDF4_JAVA_JNI_BINARY_DIR}
+ ${HDF4_JAVA_HDF_LIB_DIR}
+)
+
+set (HDFJNI_EXAMPLES
+ HDF4FileCreate
+ HDF4GroupCreate
+ HDF4DatasetCreate
+)
+
+set (CMAKE_JAVA_INCLUDE_PATH "${HDF4_JAVA_JARS};${HDF4_JAVA_LOGGING_JAR};${HDF4_JAVA_LOGGING_SIMPLE_JAR}")
+
+if (WIN32 AND NOT CYGWIN)
+ set (CMAKE_JAVA_INCLUDE_FLAG_SEP ";")
+else (WIN32 AND NOT CYGWIN)
+ set (CMAKE_JAVA_INCLUDE_FLAG_SEP ":")
+endif (WIN32 AND NOT CYGWIN)
+
+set (CMAKE_JAVA_CLASSPATH ".")
+foreach (CMAKE_INCLUDE_PATH ${CMAKE_JAVA_INCLUDE_PATH})
+ set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${CMAKE_INCLUDE_PATH}")
+endforeach (CMAKE_INCLUDE_PATH)
+
+foreach (HCP_JAR ${CMAKE_JAVA_INCLUDE_PATH})
+ get_filename_component (_HCP_FILE ${HCP_JAR} NAME)
+ set (HDFJNI_CLASSJARS "${_HCP_FILE} ${HDFJNI_CLASSJARS}")
+endforeach (HCP_JAR ${CMAKE_JAVA_INCLUDE_PATH})
+
+foreach (example ${HDFJNI_EXAMPLES})
+ file (WRITE ${HDF4_JAVA_Examples_BINARY_DIR}/Manifest.txt
+ "Main-Class: examples.${example}
+Class-Path: ${HDFJNI_CLASSJARS}
+"
+ )
+ add_jar (${example} MANIFEST ${HDF4_JAVA_Examples_BINARY_DIR}/Manifest.txt ${example}.java)
+ get_target_property (${example}_JAR_FILE ${example} JAR_FILE)
+ add_dependencies (${example} ${HDF4_JAVA_HDF_LIB_TARGET})
+ set_target_properties (${example} PROPERTIES FOLDER examples/java)
+endforeach (example ${HDFJNI_EXAMPLES})
+
+if (CMAKE_BUILD_TYPE MATCHES Debug)
+ set (CMD_ARGS "-Dhdf.hdflib.HDFLibrary.loadLibraryName=hdf_java_debug;")
+endif(CMAKE_BUILD_TYPE MATCHES Debug)
+
+if (BUILD_TESTING)
+ macro (ADD_H4_TEST resultfile resultcode)
+ add_test (
+ NAME JAVAEX-${resultfile}
+ COMMAND "${CMAKE_COMMAND}"
+ -D "TEST_TESTER=${CMAKE_Java_RUNTIME};${CMAKE_Java_RUNTIME_FLAGS}"
+ -D "TEST_PROGRAM=examples.${resultfile}"
+ -D "TEST_ARGS:STRING=${CMD_ARGS}${ARGN}"
+ -D "TEST_CLASSPATH:STRING=${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${${resultfile}_JAR_FILE}"
+ -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_OUTPUT_DIRECTORY}"
+ -D "TEST_FOLDER=${HDF4_JAVA_Examples_BINARY_DIR}"
+ -D "TEST_OUTPUT=${HDF4_JAVA_Examples_BINARY_DIR}/${resultfile}.out"
+ -D "TEST_EXPECT=${resultcode}"
+ -D "TEST_SKIP_COMPARE=TRUE"
+ -P "${HDF_RESOURCES_DIR}/jrunTest.cmake"
+ )
+ if (NOT "${last_test}" STREQUAL "")
+ set_tests_properties (JAVAEX-${resultfile} PROPERTIES DEPENDS ${last_test})
+ endif (NOT "${last_test}" STREQUAL "")
+ set (last_test "JAVAEX-${resultfile}")
+ endmacro (ADD_H4_TEST file)
+
+ foreach (example ${HDFJNI_EXAMPLES})
+ add_test (
+ NAME JAVAEX-${example}-clearall-objects
+ COMMAND ${CMAKE_COMMAND}
+ -E remove
+ ${HDF4_JAVA_Examples_BINARY_DIR}/${example}.hdf
+ ${example}.out
+ ${example}.out.err
+ )
+ if (NOT "${last_test}" STREQUAL "")
+ set_tests_properties (JAVAEX-${example}-clearall-objects PROPERTIES DEPENDS ${last_test})
+ endif (NOT "${last_test}" STREQUAL "")
+ set (last_test "JAVAEX-${example}-clearall-objects")
+ ADD_H4_TEST (${example} 0)
+ endforeach (example ${HDFJNI_EXAMPLES})
+
+endif (BUILD_TESTING)
diff --git a/java/examples/HDF4DatasetCreate.java b/java/examples/HDF4DatasetCreate.java
new file mode 100644
index 0000000..c4e4f96
--- /dev/null
+++ b/java/examples/HDF4DatasetCreate.java
@@ -0,0 +1,203 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Products. The full HDF copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package examples;
+
+import hdf.hdflib.HDFConstants;
+import hdf.hdflib.HDFLibrary;
+
+/**
+ * <p>
+ * Title: HDF Native Package (Java) Example
+ * </p>
+ * <p>
+ * Description: this example shows how to create HDF4 datasets using the
+ * "HDF Native Package (Java)". The example creates the group structure and
+ * datasets:
+ *
+ * <pre>
+ * "/" (root)
+ * integer arrays
+ * 2D 32-bit integer 20x10
+ * 3D unsigned 8-bit integer 20x10x5
+ * float arrays
+ * 2D 64-bit double 20x10
+ * 3D 32-bit float 20x10x5
+ * </pre>
+ *
+ * Reference the C Example, VG_add_sds_to_vgroup.
+ * </p>
+ */
+public class HDF4DatasetCreate {
+ private static String fname = "HDF4DatasetCreate.hdf";
+ private static int[] dims2D = { 20, 10 };
+ private static int[] dims3D = { 20, 10, 5 };
+
+ public static void main(String args[]) throws Exception {
+ long file_id = -1;
+ long vgroup_id1 = -1;
+ long vgroup_id2 = -1;
+ long sd_id = -1;
+ long sds_id = -1;
+ int sds_ref = -1;
+
+ // Create a new file using default properties.
+ try {
+ file_id = HDFLibrary.Hopen(fname, HDFConstants.DFACC_CREATE);
+ // Initialize the V interface.
+ HDFLibrary.Vstart(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ System.err.println("Failed to create file:" + fname);
+ return;
+ }
+
+ // Create two vgroups and set their name and class.
+ try {
+ // Create the vgroup. Note that the vgroup reference number is set
+ // to -1 for creating and the access mode is "w" for writing.
+ vgroup_id1 = HDFLibrary.Vattach (file_id, -1, "w");
+ if (vgroup_id1 >= 0) {
+ HDFLibrary.Vsetname(vgroup_id1, "integer arrays");
+ HDFLibrary.Vsetclass (vgroup_id1, "Common Vgroups");
+ }
+ vgroup_id2 = HDFLibrary.Vattach (file_id, -1, "w");
+ if (vgroup_id2 >= 0) {
+ HDFLibrary.Vsetname(vgroup_id2, "float arrays");
+ HDFLibrary.Vsetclass (vgroup_id2, "Common Vgroups");
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Initialize the SD interface.
+ try {
+ sd_id = HDFLibrary.SDstart (fname, HDFConstants.DFACC_WRITE);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ if (sd_id > 0) {
+ // create the SDS, 2D 32-bit (4 bytes) integer dataset of 20 by 10
+ try {
+ sds_id = HDFLibrary.SDcreate (sd_id, "2D 32-bit integer 20x10", (long)HDFConstants.DFNT_INT32, 2, dims2D);
+ if (sds_id >= 0) {
+ // Obtain the reference number of the SDS using its identifier.
+ sds_ref = HDFLibrary.SDidtoref(sds_id);
+ System.out.println("sds_ref:" + sds_ref);
+ // Add the SDS to the vgroup. Note: the tag DFTAG_NDG is used
+ // when adding an SDS. Refer to Appendix A for the entire list of tags.
+ HDFLibrary.Vaddtagref(vgroup_id1, HDFConstants.DFTAG_NDG, sds_ref);
+ // Terminate access to the data set.
+ HDFLibrary.SDendaccess(sds_id);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ // create 3D 8-bit (1 byte) unsigned integer dataset of 20 by 10 by 5
+ sds_id = HDFLibrary.SDcreate (sd_id, "3D 8-bit unsigned integer 20x10x5", (long)HDFConstants.DFNT_INT8, 3, dims3D);
+ if (sds_id >= 0) {
+ // Obtain the reference number of the SDS using its identifier.
+ sds_ref = HDFLibrary.SDidtoref(sds_id);
+ System.out.println("sds_ref:" + sds_ref);
+ // Add the SDS to the vgroup. Note: the tag DFTAG_NDG is used
+ // when adding an SDS. Refer to Appendix A for the entire list of tags.
+ HDFLibrary.Vaddtagref(vgroup_id1, HDFConstants.DFTAG_NDG, sds_ref);
+ // Terminate access to the data set.
+ HDFLibrary.SDendaccess(sds_id);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ // create 2D 64-bit (8 bytes) double dataset of 20 by 10
+ sds_id = HDFLibrary.SDcreate (sd_id, "2D 64-bit double 20x10", HDFConstants.DFNT_FLOAT64, 2, dims2D);
+ if (sds_id >= 0) {
+ // Obtain the reference number of the SDS using its identifier.
+ sds_ref = HDFLibrary.SDidtoref(sds_id);
+ System.out.println("sds_ref:" + sds_ref);
+ // Add the SDS to the vgroup. Note: the tag DFTAG_NDG is used
+ // when adding an SDS. Refer to Appendix A for the entire list of tags.
+ HDFLibrary.Vaddtagref(vgroup_id2, HDFConstants.DFTAG_NDG, sds_ref);
+ // Terminate access to the data set.
+ HDFLibrary.SDendaccess(sds_id);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ // create 3D 32-bit (4 bytes) float dataset of 20 by 10 by 5
+ sds_id = HDFLibrary.SDcreate (sd_id, "3D 32-bit float 20x10x5", HDFConstants.DFNT_FLOAT32, 3, dims3D);
+ if (sds_id >= 0) {
+ // Obtain the reference number of the SDS using its identifier.
+ sds_ref = HDFLibrary.SDidtoref(sds_id);
+ System.out.println("sds_ref:" + sds_ref);
+ // Add the SDS to the vgroup. Note: the tag DFTAG_NDG is used
+ // when adding an SDS. Refer to Appendix A for the entire list of tags.
+ HDFLibrary.Vaddtagref(vgroup_id2, HDFConstants.DFTAG_NDG, sds_ref);
+ // Terminate access to the data set.
+ HDFLibrary.SDendaccess(sds_id);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the SD interface and close the file.
+ try {
+ HDFLibrary.SDend(sd_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ else {
+ System.err.println("Could not initialize SDS interface");
+ }
+ // Close the groups.
+ try {
+ if (vgroup_id2 >= 0)
+ HDFLibrary.Vdetach(vgroup_id2);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ try {
+ if (vgroup_id1 >= 0)
+ HDFLibrary.Vdetach(vgroup_id1);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0) {
+ HDFLibrary.Vend (file_id);
+ HDFLibrary.Hclose(file_id);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+}
diff --git a/java/examples/HDF4FileCreate.java b/java/examples/HDF4FileCreate.java
new file mode 100644
index 0000000..782fbf3
--- /dev/null
+++ b/java/examples/HDF4FileCreate.java
@@ -0,0 +1,52 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Products. The full HDF copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package examples;
+
+import hdf.hdflib.HDFConstants;
+import hdf.hdflib.HDFLibrary;
+
+/**
+ * <p>
+ * Title: HDF Native Package (Java) Example
+ * </p>
+ * <p>
+ * Description: this example shows how to create an empty HDF4 file using the
+ * "HDF Native Package (Java)".
+ * </p>
+ */
+public class HDF4FileCreate {
+ private static String fname = "HDF4FileCreate.hdf";
+
+ public static void main(String args[]) throws Exception {
+ long file_id = -1;
+
+ // Create a new file using default properties.
+ try {
+ file_id = HDFLibrary.Hopen(fname, HDFConstants.DFACC_CREATE);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ System.err.println("Failed to create file:" + fname);
+ return;
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ HDFLibrary.Hclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+}
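
For context, the create/close pattern shown in HDF4FileCreate.java can be checked by reopening the file read-only. The following is only a minimal sketch in the same style; it is not part of the imported sources, and HDFLibrary.Hishdf(String) plus HDFConstants.DFACC_READ are assumed members of the hdf.hdflib wrapper added by this import.

    import hdf.hdflib.HDFConstants;
    import hdf.hdflib.HDFLibrary;

    // Minimal sketch (assumption-based): verify and reopen the file written by
    // HDF4FileCreate, then close it again.
    public class HDF4FileReopenSketch {
        public static void main(String[] args) throws Exception {
            String fname = "HDF4FileCreate.hdf";
            // Hishdf is assumed to wrap the C-level Hishdf() validity check.
            if (!HDFLibrary.Hishdf(fname)) {
                System.err.println(fname + " is not an HDF4 file");
                return;
            }
            long file_id = HDFLibrary.Hopen(fname, HDFConstants.DFACC_READ);
            if (file_id >= 0)
                HDFLibrary.Hclose(file_id);
        }
    }
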
diff --git a/java/examples/HDF4GroupCreate.java b/java/examples/HDF4GroupCreate.java
new file mode 100644
index 0000000..225e788
--- /dev/null
+++ b/java/examples/HDF4GroupCreate.java
@@ -0,0 +1,108 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Products. The full HDF copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package examples;
+
+import hdf.hdflib.HDFConstants;
+import hdf.hdflib.HDFLibrary;
+
+/**
+ * <p>
+ * Title: HDF Native Package (Java) Example
+ * </p>
+ * <p>
+ * Description: this example shows how to create HDF4 groups using the
+ * "HDF Native Package (Java)". The example creates the group structure:
+ *
+ * <pre>
+ * "/" (root)
+ * g1
+ * g11
+ * g12
+ * g2
+ * g21
+ * g22
+ * </pre>
+ *
+ * </p>
+ */
+public class HDF4GroupCreate {
+ private static String fname = "HDF4GroupCreate.hdf";
+
+ public static void main(String args[]) throws Exception {
+ long file_id = -1;
+ long subvgroup_id = -1;
+ long vgroup_id1 = -1;
+ long vgroup_id2 = -1;
+
+ // Create a new file using default properties.
+ try {
+ file_id = HDFLibrary.Hopen(fname, HDFConstants.DFACC_CREATE);
+ // Initialize the V interface.
+ if (file_id >= 0)
+ HDFLibrary.Vstart(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ System.err.println("Failed to create file:" + fname);
+ return;
+ }
+
+ try {
+ // Create the vgroup. Note that the vgroup reference number is set
+ // to -1 for creating and the access mode is "w" for writing.
+ if (file_id >= 0) {
+ vgroup_id1 = HDFLibrary.VSattach (file_id, -1, "w");
+ if (vgroup_id1 >= 0) {
+ HDFLibrary.VSsetname(vgroup_id1, "g1");
+ HDFLibrary.VSsetclass (vgroup_id1, "Empty Vdatas");
+ }
+ vgroup_id2 = HDFLibrary.VSattach (file_id, -1, "w");
+ if (vgroup_id2 >= 0) {
+ HDFLibrary.VSsetname(vgroup_id2, "g2");
+ HDFLibrary.VSsetclass (vgroup_id2, "Empty Vdatas");
+ }
+ }
+
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the groups.
+ try {
+ if (vgroup_id2 >= 0)
+ HDFLibrary.VSdetach(vgroup_id2);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ try {
+ if (vgroup_id1 >= 0)
+ HDFLibrary.VSdetach(vgroup_id1);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0) {
+ HDFLibrary.Vend (file_id);
+ HDFLibrary.Hclose(file_id);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+}
diff --git a/java/examples/Makefile.am b/java/examples/Makefile.am
new file mode 100644
index 0000000..56d422e
--- /dev/null
+++ b/java/examples/Makefile.am
@@ -0,0 +1,48 @@
+#
+# HDF Java native interface (JNI) Library Examples Makefile(.in)
+
+include $(top_srcdir)/config/commence.am
+
+# Mark this directory as part of the JNI API
+JAVA_API=yes
+
+JAVAROOT = .classes
+
+classes:
+ test -d $(@D)/$(JAVAROOT) || $(MKDIR_P) $(@D)/$(JAVAROOT)
+
+pkgpath = examples
+hdfjarfile = jar$(PACKAGE_TARNAME)-$(PACKAGE_VERSION).jar
+jarfile = jar$(PACKAGE_TARNAME)examples.jar
+
+CLASSPATH_ENV=CLASSPATH=.:$(JAVAROOT):$(top_builddir)/java/src/$(hdfjarfile):$(top_srcdir)/java/lib/slf4j-api-1.7.5.jar:$(top_srcdir)/java/lib/ext/slf4j-simple-1.7.5.jar:$$CLASSPATH
+AM_JAVACFLAGS = $(H4_JAVACFLAGS) -deprecation
+
+noinst_JAVA = \
+ HDF4FileCreate.java \
+ HDF4GroupCreate.java \
+ HDF4DatasetCreate.java
+
+$(jarfile): classnoinst.stamp classes
+ $(JAR) cvf $@ -C $(JAVAROOT)/ $(pkgpath)
+
+noinst_DATA = $(jarfile)
+
+.PHONY: classes
+
+check_SCRIPTS =
+TEST_SCRIPT = $(check_SCRIPTS)
+
+CLEANFILES = classnoinst.stamp $(jarfile) $(JAVAROOT)/$(pkgpath)/*.class
+
+#JAVA_JUNIT = $(JAVA_SRCS)
+#noinst_JAVA = @JAVA_JUNIT@
+#EXTRA_JAVA = $(JAVA_JUNIT)
+#EXTRA_TEST = $(TESTS_JUNIT)
+
+clean:
+ rm -rf $(JAVAROOT)/*
+ rm -f $(jarfile)
+ rm -f classnoinst.stamp
+
+include $(top_srcdir)/config/conclude.am
diff --git a/mfhdf/nctest/Makefile.in b/java/examples/Makefile.in
similarity index 76%
copy from mfhdf/nctest/Makefile.in
copy to java/examples/Makefile.in
index 4e60508..2c5094e 100644
--- a/mfhdf/nctest/Makefile.in
+++ b/java/examples/Makefile.in
@@ -14,8 +14,9 @@
@SET_MAKE@
-#############################################################################
-#############################################################################
+#
+# HDF Java native interface (JNI) Library Examples Makefile(.in)
+
VPATH = @srcdir@
am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)'
am__make_running_with_option = \
@@ -83,30 +84,30 @@ host_triplet = @host@
DIST_COMMON = $(top_srcdir)/config/commence.am \
$(top_srcdir)/config/conclude.am $(srcdir)/Makefile.in \
$(srcdir)/Makefile.am $(top_srcdir)/bin/mkinstalldirs \
- $(top_srcdir)/bin/depcomp $(top_srcdir)/bin/test-driver README
-check_PROGRAMS = nctest$(EXEEXT)
-TESTS = $(am__EXEEXT_1)
-subdir = mfhdf/nctest
+ $(top_srcdir)/bin/test-driver
+TESTS = $(am__EXEEXT_2)
+subdir = java/examples
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
-am__aclocal_m4_deps = $(top_srcdir)/configure.ac
+am__aclocal_m4_deps = $(top_srcdir)/m4/ax_check_class.m4 \
+ $(top_srcdir)/m4/ax_check_classpath.m4 \
+ $(top_srcdir)/m4/ax_check_java_home.m4 \
+ $(top_srcdir)/m4/ax_check_junit.m4 \
+ $(top_srcdir)/m4/ax_java_options.m4 \
+ $(top_srcdir)/m4/ax_jni_include_dir.m4 \
+ $(top_srcdir)/m4/ax_prog_jar.m4 \
+ $(top_srcdir)/m4/ax_prog_java.m4 \
+ $(top_srcdir)/m4/ax_prog_java_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javac.m4 \
+ $(top_srcdir)/m4/ax_prog_javac_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javadoc.m4 \
+ $(top_srcdir)/m4/ax_try_compile_java.m4 \
+ $(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
mkinstalldirs = $(SHELL) $(top_srcdir)/bin/mkinstalldirs
CONFIG_HEADER = $(top_builddir)/hdf/src/h4config.h
CONFIG_CLEAN_FILES =
CONFIG_CLEAN_VPATH_FILES =
-am_nctest_OBJECTS = add.$(OBJEXT) atttests.$(OBJEXT) \
- cdftests.$(OBJEXT) dimtests.$(OBJEXT) driver.$(OBJEXT) \
- emalloc.$(OBJEXT) error.$(OBJEXT) misctest.$(OBJEXT) \
- rec.$(OBJEXT) slabs.$(OBJEXT) val.$(OBJEXT) \
- varget_unlim.$(OBJEXT) varget.$(OBJEXT) vargetg.$(OBJEXT) \
- varput.$(OBJEXT) varputg.$(OBJEXT) vardef.$(OBJEXT) \
- vartests.$(OBJEXT) vputget.$(OBJEXT) vputgetg.$(OBJEXT)
-nctest_OBJECTS = $(am_nctest_OBJECTS)
-AM_V_lt = $(am__v_lt_@AM_V@)
-am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@)
-am__v_lt_0 = --silent
-am__v_lt_1 =
AM_V_P = $(am__v_P_@AM_V@)
am__v_P_ = $(am__v_P_@AM_DEFAULT_V@)
am__v_P_0 = false
@@ -119,54 +120,16 @@ AM_V_at = $(am__v_at_@AM_V@)
am__v_at_ = $(am__v_at_@AM_DEFAULT_V@)
am__v_at_0 = @
am__v_at_1 =
-DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir)/hdf/src
-depcomp = $(SHELL) $(top_srcdir)/bin/depcomp
-am__depfiles_maybe = depfiles
-am__mv = mv -f
-COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \
- $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS)
-LTCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \
- $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) \
- $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \
- $(AM_CFLAGS) $(CFLAGS)
-AM_V_CC = $(am__v_CC_@AM_V@)
-am__v_CC_ = $(am__v_CC_@AM_DEFAULT_V@)
-am__v_CC_0 = @echo " CC " $@;
-am__v_CC_1 =
-CCLD = $(CC)
-LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \
- $(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \
- $(AM_LDFLAGS) $(LDFLAGS) -o $@
-AM_V_CCLD = $(am__v_CCLD_@AM_V@)
-am__v_CCLD_ = $(am__v_CCLD_@AM_DEFAULT_V@)
-am__v_CCLD_0 = @echo " CCLD " $@;
-am__v_CCLD_1 =
-SOURCES = $(nctest_SOURCES)
-DIST_SOURCES = $(nctest_SOURCES)
+SOURCES =
+DIST_SOURCES =
am__can_run_installinfo = \
case $$AM_UPDATE_INFO_DIR in \
n|no|NO) false;; \
*) (install-info --version) >/dev/null 2>&1;; \
esac
+am__java_sources = $(noinst_JAVA)
+DATA = $(noinst_DATA)
am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP)
-# Read a list of newline-separated strings from the standard input,
-# and print each of them once, without duplicates. Input order is
-# *not* preserved.
-am__uniquify_input = $(AWK) '\
- BEGIN { nonempty = 0; } \
- { items[$$0] = 1; nonempty = 1; } \
- END { if (nonempty) { for (i in items) print i; }; } \
-'
-# Make sure the list of sources is unique. This is necessary because,
-# e.g., the same source file might be shared among _SOURCES variables
-# for different programs/libraries.
-am__define_uniq_tagged_files = \
- list='$(am__tagged_files)'; \
- unique=`for i in $$list; do \
- if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
- done | $(am__uniquify_input)`
-ETAGS = etags
-CTAGS = ctags
am__tty_colors_dummy = \
mgn= red= grn= lgn= blu= brg= std=; \
am__color_tests=no
@@ -351,10 +314,14 @@ am__set_TESTS_bases = \
bases=`echo $$bases`
RECHECK_LOGS = $(TEST_LOGS)
AM_RECURSIVE_TARGETS = check recheck
-am__EXEEXT_1 = nctest$(EXEEXT)
+am__EXEEXT_1 =
+am__EXEEXT_2 = $(am__EXEEXT_1)
TEST_SUITE_LOG = test-suite.log
-LOG_DRIVER = $(SHELL) $(top_srcdir)/bin/test-driver
-LOG_COMPILE = $(LOG_COMPILER) $(AM_LOG_FLAGS) $(LOG_FLAGS)
+am__test_logs1 = $(TESTS:=.log)
+am__test_logs2 = $(am__test_logs1:@EXEEXT@.log=.log)
+TEST_LOGS = $(am__test_logs2:.sh.log=.log)
+SH_LOG_DRIVER = $(SHELL) $(top_srcdir)/bin/test-driver
+SH_LOG_COMPILE = $(SH_LOG_COMPILER) $(AM_SH_LOG_FLAGS) $(SH_LOG_FLAGS)
am__set_b = \
case '$@' in \
*/*) \
@@ -365,11 +332,6 @@ am__set_b = \
*) \
b='$*';; \
esac
-am__test_logs1 = $(TESTS:=.log)
-am__test_logs2 = $(am__test_logs1:@EXEEXT@.log=.log)
-TEST_LOGS = $(am__test_logs2:.sh.log=.log)
-SH_LOG_DRIVER = $(SHELL) $(top_srcdir)/bin/test-driver
-SH_LOG_COMPILE = $(SH_LOG_COMPILER) $(AM_SH_LOG_FLAGS) $(SH_LOG_FLAGS)
DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
ACLOCAL = @ACLOCAL@
AMTAR = @AMTAR@
@@ -413,12 +375,27 @@ FFLAGS = @FFLAGS@
FGREP = @FGREP@
FLIBS = @FLIBS@
GREP = @GREP@
+H4_CLASSPATH = @H4_CLASSPATH@
+H4_JAVACFLAGS = @H4_JAVACFLAGS@
+H4_JAVAFLAGS = @H4_JAVAFLAGS@
H4_VERSION = @H4_VERSION@
+HDF_JAVA = @HDF_JAVA@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
INSTALL_PROGRAM = @INSTALL_PROGRAM@
INSTALL_SCRIPT = @INSTALL_SCRIPT@
INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+JAR = @JAR@
+JAVA = @JAVA@
+JAVAC = @JAVAC@
+JAVACFLAGS = @JAVACFLAGS@
+JAVADOC = @JAVADOC@
+JAVAFLAGS = @JAVAFLAGS@
+JAVAPREFIX = @JAVAPREFIX@
+JAVA_JUNIT = @JAVA_JUNIT@
+JAVA_PATH_NAME = @JAVA_PATH_NAME@
+JNIFLAGS = @JNIFLAGS@
+JUNIT = @JUNIT@
LD = @LD@
LDFLAGS = @LDFLAGS@
LEX = @LEX@
@@ -461,11 +438,14 @@ STRIP = @STRIP@
SZIP_HAS_ENCODER = @SZIP_HAS_ENCODER@
SZIP_INFO = @SZIP_INFO@
TBL = @TBL@
+TESTS_JUNIT = @TESTS_JUNIT@
TEST_FORTRAN_NETCDF = @TEST_FORTRAN_NETCDF@
UNAME_INFO = @UNAME_INFO@
USE_COMP_SZIP = @USE_COMP_SZIP@
+UUDECODE = @UUDECODE@
VERSION = @VERSION@
YACC = @YACC@
+_ACJNI_JAVAC = @_ACJNI_JAVAC@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
@@ -538,30 +518,25 @@ ACLOCAL_AMFLAGS = "-I m4"
# .chkexe files are used to mark tests that have run successfully.
# .chklog files are output from those tests.
-
-#############################################################################
-#############################################################################
-CHECK_CLEANFILES = *.chkexe *.chklog test2.nc test.nc
-nctest_INCLUDES = -I$(top_srcdir)/hdf/src \
- -I$(top_srcdir)/mfhdf/libsrc \
- -I$(top_builddir)/mfhdf/libsrc
-
-DEFINES = -DNDEBUG -DHDF
-AM_CPPFLAGS = $(nctest_INCLUDES) $(DEFINES)
-
-#############################################################################
-#############################################################################
-TEST_PROG = nctest
-
-# Information for building the "ncgen" program
-nctest_SOURCES = add.c atttests.c cdftests.c dimtests.c driver.c emalloc.c \
- error.c misctest.c rec.c slabs.c val.c varget_unlim.c \
- varget.c vargetg.c varput.c varputg.c vardef.c vartests.c \
- vputget.c vputgetg.c
-
-nctest_LDADD = $(LIBMFHDF) $(LIBHDF)
-nctest_DEPENDENCIES = $(LIBMFHDF) $(LIBHDF)
-DISTCLEANFILES =
+CHECK_CLEANFILES = *.chkexe *.chklog
+
+# Mark this directory as part of the JNI API
+JAVA_API = yes
+JAVAROOT = .classes
+pkgpath = examples
+hdfjarfile = jar$(PACKAGE_TARNAME)-$(PACKAGE_VERSION).jar
+jarfile = jar$(PACKAGE_TARNAME)examples.jar
+CLASSPATH_ENV = CLASSPATH=.:$(JAVAROOT):$(top_builddir)/java/src/$(hdfjarfile):$(top_srcdir)/java/lib/slf4j-api-1.7.5.jar:$(top_srcdir)/java/lib/ext/slf4j-simple-1.7.5.jar:$$CLASSPATH
+AM_JAVACFLAGS = $(H4_JAVACFLAGS) -deprecation
+noinst_JAVA = \
+ HDF4FileCreate.java \
+ HDF4GroupCreate.java \
+ HDF4DatasetCreate.java
+
+noinst_DATA = $(jarfile)
+check_SCRIPTS =
+TEST_SCRIPT = $(check_SCRIPTS)
+CLEANFILES = classnoinst.stamp $(jarfile) $(JAVAROOT)/$(pkgpath)/*.class
# Automake needs to be taught how to build lib, progs, and tests targets.
# These will be filled in automatically for the most part (e.g.,
@@ -583,7 +558,7 @@ TEST_SCRIPT_CHKSH = $(TEST_SCRIPT:=.chkexe_)
all: all-am
.SUFFIXES:
-.SUFFIXES: .c .lo .log .o .obj .sh .sh$(EXEEXT) .trs
+.SUFFIXES: .log .sh .sh$(EXEEXT) .trs
$(srcdir)/Makefile.in: @MAINTAINER_MODE_TRUE@ $(srcdir)/Makefile.am $(top_srcdir)/config/commence.am $(top_srcdir)/config/conclude.am $(am__configure_deps)
@for dep in $?; do \
case '$(am__configure_deps)' in \
@@ -593,9 +568,9 @@ $(srcdir)/Makefile.in: @MAINTAINER_MODE_TRUE@ $(srcdir)/Makefile.am $(top_srcdir
exit 1;; \
esac; \
done; \
- echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign mfhdf/nctest/Makefile'; \
+ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign java/examples/Makefile'; \
$(am__cd) $(top_srcdir) && \
- $(AUTOMAKE) --foreign mfhdf/nctest/Makefile
+ $(AUTOMAKE) --foreign java/examples/Makefile
.PRECIOUS: Makefile
Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
@case '$?' in \
@@ -616,124 +591,31 @@ $(ACLOCAL_M4): @MAINTAINER_MODE_TRUE@ $(am__aclocal_m4_deps)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(am__aclocal_m4_deps):
-clean-checkPROGRAMS:
- @list='$(check_PROGRAMS)'; test -n "$$list" || exit 0; \
- echo " rm -f" $$list; \
- rm -f $$list || exit $$?; \
- test -n "$(EXEEXT)" || exit 0; \
- list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \
- echo " rm -f" $$list; \
- rm -f $$list
-
-nctest$(EXEEXT): $(nctest_OBJECTS) $(nctest_DEPENDENCIES) $(EXTRA_nctest_DEPENDENCIES)
- @rm -f nctest$(EXEEXT)
- $(AM_V_CCLD)$(LINK) $(nctest_OBJECTS) $(nctest_LDADD) $(LIBS)
-
-mostlyclean-compile:
- -rm -f *.$(OBJEXT)
-
-distclean-compile:
- -rm -f *.tab.c
-
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/add.Po@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/atttests.Po@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/cdftests.Po@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/dimtests.Po@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/driver.Po@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/emalloc.Po@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/error.Po@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/misctest.Po@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/rec.Po@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/slabs.Po@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/val.Po@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/vardef.Po@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/varget.Po@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/varget_unlim.Po@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/vargetg.Po@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/varput.Po@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/varputg.Po@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/vartests.Po@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/vputget.Po@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/vputgetg.Po@am__quote@
-
-.c.o:
-@am__fastdepCC_TRUE@ $(AM_V_CC)$(COMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $<
-@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po
-@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
-@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
-@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(COMPILE) -c -o $@ $<
-
-.c.obj:
-@am__fastdepCC_TRUE@ $(AM_V_CC)$(COMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'`
-@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po
-@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
-@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
-@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(COMPILE) -c -o $@ `$(CYGPATH_W) '$<'`
-
-.c.lo:
-@am__fastdepCC_TRUE@ $(AM_V_CC)$(LTCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $<
-@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo
-@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@
-@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
-@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LTCOMPILE) -c -o $@ $<
-
mostlyclean-libtool:
-rm -f *.lo
clean-libtool:
-rm -rf .libs _libs
-ID: $(am__tagged_files)
- $(am__define_uniq_tagged_files); mkid -fID $$unique
-tags: tags-am
-TAGS: tags
-
-tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
- set x; \
- here=`pwd`; \
- $(am__define_uniq_tagged_files); \
- shift; \
- if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \
- test -n "$$unique" || unique=$$empty_fix; \
- if test $$# -gt 0; then \
- $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
- "$$@" $$unique; \
- else \
- $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
- $$unique; \
- fi; \
- fi
-ctags: ctags-am
-
-CTAGS: ctags
-ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
- $(am__define_uniq_tagged_files); \
- test -z "$(CTAGS_ARGS)$$unique" \
- || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \
- $$unique
-
-GTAGS:
- here=`$(am__cd) $(top_builddir) && pwd` \
- && $(am__cd) $(top_srcdir) \
- && gtags -i $(GTAGS_ARGS) "$$here"
-cscopelist: cscopelist-am
-
-cscopelist-am: $(am__tagged_files)
- list='$(am__tagged_files)'; \
- case "$(srcdir)" in \
- [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \
- *) sdir=$(subdir)/$(srcdir) ;; \
- esac; \
- for i in $$list; do \
- if test -f "$$i"; then \
- echo "$(subdir)/$$i"; \
- else \
- echo "$$sdir/$$i"; \
- fi; \
- done >> $(top_builddir)/cscope.files
+classnoinst.stamp: $(am__java_sources)
+ @list1='$?'; list2=; if test -n "$$list1"; then \
+ for p in $$list1; do \
+ if test -f $$p; then d=; else d="$(srcdir)/"; fi; \
+ list2="$$list2 $$d$$p"; \
+ done; \
+ echo '$(CLASSPATH_ENV) $(JAVAC) -d $(JAVAROOT) $(AM_JAVACFLAGS) $(JAVACFLAGS) '"$$list2"; \
+ $(CLASSPATH_ENV) $(JAVAC) -d $(JAVAROOT) $(AM_JAVACFLAGS) $(JAVACFLAGS) $$list2; \
+ else :; fi
+ echo timestamp > $@
+
+clean-noinstJAVA:
+ -rm -f *.class classnoinst.stamp
+tags TAGS:
+
+ctags CTAGS:
+
+cscope cscopelist:
-distclean-tags:
- -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags
# Recover from deleted '.trs' file; this should ensure that
# "rm -f foo.log; make foo.trs" re-run 'foo.test', and re-create
@@ -854,7 +736,7 @@ $(TEST_SUITE_LOG): $(TEST_LOGS)
echo "$$col$$br$$std"; \
fi; \
$$success || exit 1
-recheck: all $(check_PROGRAMS)
+recheck: all $(check_SCRIPTS)
@test -z "$(TEST_SUITE_LOG)" || rm -f $(TEST_SUITE_LOG)
@set +e; $(am__set_TESTS_bases); \
bases=`for i in $$bases; do echo $$i; done \
@@ -865,13 +747,6 @@ recheck: all $(check_PROGRAMS)
am__force_recheck=am--force-recheck \
TEST_LOGS="$$log_list"; \
exit $$?
-nctest.log: nctest$(EXEEXT)
- @p='nctest$(EXEEXT)'; \
- b='nctest'; \
- $(am__check_pre) $(LOG_DRIVER) --test-name "$$f" \
- --log-file $$b.log --trs-file $$b.trs \
- $(am__common_driver_flags) $(AM_LOG_DRIVER_FLAGS) $(LOG_DRIVER_FLAGS) -- $(LOG_COMPILE) \
- "$$tst" $(AM_TESTS_FD_REDIRECT)
.sh.log:
@p='$<'; \
$(am__set_b); \
@@ -918,10 +793,10 @@ distdir: $(DISTFILES)
fi; \
done
check-am: all-am
- $(MAKE) $(AM_MAKEFLAGS) $(check_PROGRAMS)
+ $(MAKE) $(AM_MAKEFLAGS) $(check_SCRIPTS)
$(MAKE) $(AM_MAKEFLAGS) check-TESTS
check: check-am
-all-am: Makefile all-local
+all-am: Makefile classnoinst.stamp $(DATA) all-local
installdirs:
install: install-am
install-exec: install-exec-am
@@ -948,25 +823,20 @@ mostlyclean-generic:
-test -z "$(TEST_SUITE_LOG)" || rm -f $(TEST_SUITE_LOG)
clean-generic:
+ -test -z "$(CLEANFILES)" || rm -f $(CLEANFILES)
distclean-generic:
-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
-test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES)
- -test -z "$(DISTCLEANFILES)" || rm -f $(DISTCLEANFILES)
maintainer-clean-generic:
@echo "This command is intended for maintainers to use"
@echo "it deletes files that may require special tools to rebuild."
-clean: clean-am
-
-clean-am: clean-checkPROGRAMS clean-generic clean-libtool \
- mostlyclean-am
+clean-am: clean-generic clean-libtool clean-noinstJAVA mostlyclean-am
distclean: distclean-am
- -rm -rf ./$(DEPDIR)
-rm -f Makefile
-distclean-am: clean-am distclean-compile distclean-generic \
- distclean-tags
+distclean-am: clean-am distclean-generic
dvi: dvi-am
@@ -1009,14 +879,13 @@ install-ps-am:
installcheck-am:
maintainer-clean: maintainer-clean-am
- -rm -rf ./$(DEPDIR)
-rm -f Makefile
maintainer-clean-am: distclean-am maintainer-clean-generic
mostlyclean: mostlyclean-am
-mostlyclean-am: mostlyclean-compile mostlyclean-generic \
- mostlyclean-libtool mostlyclean-local
+mostlyclean-am: mostlyclean-generic mostlyclean-libtool \
+ mostlyclean-local
pdf: pdf-am
@@ -1030,20 +899,37 @@ uninstall-am:
.MAKE: check-am install-am install-strip
-.PHONY: CTAGS GTAGS TAGS all all-am all-local check check-TESTS \
- check-am clean clean-checkPROGRAMS clean-generic clean-libtool \
- cscopelist-am ctags ctags-am distclean distclean-compile \
- distclean-generic distclean-libtool distclean-tags distdir dvi \
- dvi-am html html-am info info-am install install-am \
+.PHONY: all all-am all-local check check-TESTS check-am clean \
+ clean-generic clean-libtool clean-noinstJAVA cscopelist-am \
+ ctags-am distclean distclean-generic distclean-libtool distdir \
+ dvi dvi-am html html-am info info-am install install-am \
install-data install-data-am install-dvi install-dvi-am \
install-exec install-exec-am install-html install-html-am \
install-info install-info-am install-man install-pdf \
install-pdf-am install-ps install-ps-am install-strip \
installcheck installcheck-am installdirs maintainer-clean \
- maintainer-clean-generic mostlyclean mostlyclean-compile \
- mostlyclean-generic mostlyclean-libtool mostlyclean-local pdf \
- pdf-am ps ps-am recheck tags tags-am uninstall uninstall-am
+ maintainer-clean-generic mostlyclean mostlyclean-generic \
+ mostlyclean-libtool mostlyclean-local pdf pdf-am ps ps-am \
+ recheck tags-am uninstall uninstall-am
+
+
+classes:
+ test -d $(@D)/$(JAVAROOT) || $(MKDIR_P) $(@D)/$(JAVAROOT)
+
+$(jarfile): classnoinst.stamp classes
+ $(JAR) cvf $@ -C $(JAVAROOT)/ $(pkgpath)
+
+.PHONY: classes
+
+#JAVA_JUNIT = $(JAVA_SRCS)
+#noinst_JAVA = @JAVA_JUNIT@
+#EXTRA_JAVA = $(JAVA_JUNIT)
+#EXTRA_TEST = $(TESTS_JUNIT)
+clean:
+ rm -rf $(JAVAROOT)/*
+ rm -f $(jarfile)
+ rm -f classnoinst.stamp
# lib/progs/tests targets recurse into subdirectories. build-* targets
# build files in this directory.
diff --git a/java/lib/ext/slf4j-nop-1.7.5.jar b/java/lib/ext/slf4j-nop-1.7.5.jar
new file mode 100644
index 0000000..e55bdd8
Binary files /dev/null and b/java/lib/ext/slf4j-nop-1.7.5.jar differ
diff --git a/java/lib/ext/slf4j-simple-1.7.5.jar b/java/lib/ext/slf4j-simple-1.7.5.jar
new file mode 100644
index 0000000..9dece31
Binary files /dev/null and b/java/lib/ext/slf4j-simple-1.7.5.jar differ
diff --git a/java/lib/hamcrest-core.jar b/java/lib/hamcrest-core.jar
new file mode 100644
index 0000000..9d5fe16
Binary files /dev/null and b/java/lib/hamcrest-core.jar differ
diff --git a/java/lib/junit.jar b/java/lib/junit.jar
new file mode 100644
index 0000000..aaf7444
Binary files /dev/null and b/java/lib/junit.jar differ
diff --git a/java/lib/simplelogger.properties b/java/lib/simplelogger.properties
new file mode 100644
index 0000000..119ee92
--- /dev/null
+++ b/java/lib/simplelogger.properties
@@ -0,0 +1,36 @@
+# SLF4J's SimpleLogger configuration file
+# Simple implementation of Logger that sends all enabled log messages, for all defined loggers, to System.err.
+
+org.slf4j.simpleLogger.logFile=slf4j.simpleLogger.log
+
+# Default logging detail level for all instances of SimpleLogger.
+# Must be one of ("trace", "debug", "info", "warn", or "error").
+# If not specified, defaults to "info".
+org.slf4j.simpleLogger.defaultLog=trace
+
+# Logging detail level for a SimpleLogger instance named "xxxxx".
+# Must be one of ("trace", "debug", "info", "warn", or "error").
+# If not specified, the default logging detail level is used.
+#org.slf4j.simpleLogger.log.xxxxx=
+
+# Set to true if you want the current date and time to be included in output messages.
+# Default is false, and will output the number of milliseconds elapsed since startup.
+#org.slf4j.simpleLogger.showDateTime=false
+
+# The date and time format to be used in the output messages.
+# The pattern describing the date and time format is the same that is used in java.text.SimpleDateFormat.
+# If the format is not specified or is invalid, the default format is used.
+# The default format is yyyy-MM-dd HH:mm:ss:SSS Z.
+org.slf4j.simpleLogger.dateTimeFormat=yyyy-MM-dd HH:mm:ss:SSS Z
+
+# Set to true if you want to output the current thread name.
+# Defaults to true.
+org.slf4j.simpleLogger.showThreadName=true
+
+# Set to true if you want the Logger instance name to be included in output messages.
+# Defaults to true.
+org.slf4j.simpleLogger.showLogName=true
+
+# Set to true if you want the last component of the name to be included in output messages.
+# Defaults to false.
+org.slf4j.simpleLogger.showShortLogName=false
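
The slf4j-simple binding bundled above reads this simplelogger.properties from the classpath at runtime. A minimal sketch of how a class could log through the SLF4J API shipped in java/lib (the class name and messages are illustrative only, not taken from this import):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    // Minimal sketch (assumption-based): messages are routed to SimpleLogger,
    // which is configured by simplelogger.properties found on the classpath.
    public class LoggingSketch {
        private static final Logger log = LoggerFactory.getLogger(LoggingSketch.class);

        public static void main(String[] args) {
            log.info("opening HDF4 file");
            log.trace("detail messages appear if the configured level permits");
        }
    }
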
diff --git a/java/lib/slf4j-api-1.7.5.jar b/java/lib/slf4j-api-1.7.5.jar
new file mode 100644
index 0000000..8766455
Binary files /dev/null and b/java/lib/slf4j-api-1.7.5.jar differ
diff --git a/java/src/CMakeLists.txt b/java/src/CMakeLists.txt
new file mode 100644
index 0000000..e79f6e5
--- /dev/null
+++ b/java/src/CMakeLists.txt
@@ -0,0 +1,8 @@
+cmake_minimum_required(VERSION 3.1.0)
+PROJECT ( HDF4_JAVA_SRC C Java )
+
+#-----------------------------------------------------------------------------
+# Traverse source subdirectory
+#-----------------------------------------------------------------------------
+add_subdirectory (${HDF4_JAVA_SRC_SOURCE_DIR}/jni ${HDF4_JAVA_SRC_BINARY_DIR}/jni)
+add_subdirectory (${HDF4_JAVA_SRC_SOURCE_DIR}/hdf ${HDF4_JAVA_SRC_BINARY_DIR}/hdf)
diff --git a/java/src/Makefile.am b/java/src/Makefile.am
new file mode 100644
index 0000000..12310fc
--- /dev/null
+++ b/java/src/Makefile.am
@@ -0,0 +1,73 @@
+#
+# HDF Java native interface (JNI) Library Makefile(.in)
+
+include $(top_srcdir)/config/commence.am
+
+# Mark this directory as part of the JNI API
+JAVA_API=yes
+
+SUBDIRS=jni
+
+JAVAROOT = .classes
+
+classes:
+ test -d $(@D)/$(JAVAROOT) || $(MKDIR_P) $(@D)/$(JAVAROOT)
+
+jarfile = jar$(PACKAGE_TARNAME)-$(PACKAGE_VERSION).jar
+hdf_javadir = $(libdir)
+
+pkgpath = hdf/hdflib
+CLASSPATH_ENV=CLASSPATH=.:$(top_srcdir)/java/lib/slf4j-api-1.7.5.jar:$$CLASSPATH
+
+AM_JAVACFLAGS = $(H4_JAVACFLAGS) -deprecation
+
+hdf_java_JAVA = \
+ ${pkgpath}/HDFException.java \
+ ${pkgpath}/HDFJavaException.java \
+ ${pkgpath}/HDFArray.java \
+ ${pkgpath}/HDFChunkInfo.java \
+ ${pkgpath}/HDFCompInfo.java \
+ ${pkgpath}/HDFConstants.java \
+ ${pkgpath}/HDFDeflateCompInfo.java \
+ ${pkgpath}/HDFIMCOMPCompInfo.java \
+ ${pkgpath}/HDFJPEGCompInfo.java \
+ ${pkgpath}/HDFLibrary.java \
+ ${pkgpath}/HDFLibraryException.java \
+ ${pkgpath}/HDFNativeData.java \
+ ${pkgpath}/HDFNewCompInfo.java \
+ ${pkgpath}/HDFNBITChunkInfo.java \
+ ${pkgpath}/HDFNBITCompInfo.java \
+ ${pkgpath}/HDFNotImplementedException.java \
+ ${pkgpath}/HDFOldCompInfo.java \
+ ${pkgpath}/HDFOldRLECompInfo.java \
+ ${pkgpath}/HDFRLECompInfo.java \
+ ${pkgpath}/HDFSKPHUFFCompInfo.java \
+ ${pkgpath}/HDFSZIPCompInfo.java
+
+
+$(jarfile): classhdf_java.stamp classes docs
+ $(JAR) cvf $@ -C $(JAVAROOT)/ $(pkgpath)
+
+hdf_java_DATA = $(jarfile)
+
+.PHONY: docs classes
+
+WINDOWTITLE = 'HDF Java'
+DOCTITLE = '<h1>HDF Java Wrapper</h1>'
+SRCDIR = '$(pkgpath)'
+
+docs:
+ $(JAVADOC) -sourcepath $(srcdir) -d javadoc -use -splitIndex -windowtitle $(WINDOWTITLE) -doctitle $(DOCTITLE) -J-Xmx180m -verbose -overview $(top_srcdir)/java/src/hdf/overview.html -classpath $(CLASSPATH_ENV) hdf.hdflib
+
+CLEANFILES = classhdf_java.stamp $(jarfile) $(JAVAROOT)/$(pkgpath)/*.class
+
+clean:
+ rm -rf $(JAVAROOT)/*
+ rm -f $(jarfile)
+ rm -f classhdf_java.stamp
+
+
+# Clean examples when check-clean is invoked
+check-clean :: ;
+
+#include $(top_srcdir)/config/conclude.am
diff --git a/hdf/Makefile.in b/java/src/Makefile.in
similarity index 71%
copy from hdf/Makefile.in
copy to java/src/Makefile.in
index 8a4e0d1..685d1ba 100644
--- a/hdf/Makefile.in
+++ b/java/src/Makefile.in
@@ -13,6 +13,10 @@
# PARTICULAR PURPOSE.
@SET_MAKE@
+
+#
+# HDF Java native interface (JNI) Library Makefile(.in)
+
VPATH = @srcdir@
am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)'
am__make_running_with_option = \
@@ -79,9 +83,22 @@ build_triplet = @build@
host_triplet = @host@
DIST_COMMON = $(top_srcdir)/config/commence.am $(srcdir)/Makefile.in \
$(srcdir)/Makefile.am $(top_srcdir)/bin/mkinstalldirs
-subdir = hdf
+subdir = java/src
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
-am__aclocal_m4_deps = $(top_srcdir)/configure.ac
+am__aclocal_m4_deps = $(top_srcdir)/m4/ax_check_class.m4 \
+ $(top_srcdir)/m4/ax_check_classpath.m4 \
+ $(top_srcdir)/m4/ax_check_java_home.m4 \
+ $(top_srcdir)/m4/ax_check_junit.m4 \
+ $(top_srcdir)/m4/ax_java_options.m4 \
+ $(top_srcdir)/m4/ax_jni_include_dir.m4 \
+ $(top_srcdir)/m4/ax_prog_jar.m4 \
+ $(top_srcdir)/m4/ax_prog_java.m4 \
+ $(top_srcdir)/m4/ax_prog_java_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javac.m4 \
+ $(top_srcdir)/m4/ax_prog_javac_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javadoc.m4 \
+ $(top_srcdir)/m4/ax_try_compile_java.m4 \
+ $(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
mkinstalldirs = $(SHELL) $(top_srcdir)/bin/mkinstalldirs
@@ -115,6 +132,37 @@ am__can_run_installinfo = \
n|no|NO) false;; \
*) (install-info --version) >/dev/null 2>&1;; \
esac
+am__installdirs = "$(DESTDIR)$(hdf_javadir)" \
+ "$(DESTDIR)$(hdf_javadir)"
+am__java_sources = $(hdf_java_JAVA)
+am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`;
+am__vpath_adj = case $$p in \
+ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \
+ *) f=$$p;; \
+ esac;
+am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`;
+am__install_max = 40
+am__nobase_strip_setup = \
+ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'`
+am__nobase_strip = \
+ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||"
+am__nobase_list = $(am__nobase_strip_setup); \
+ for p in $$list; do echo "$$p $$p"; done | \
+ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \
+ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \
+ if (++n[$$2] == $(am__install_max)) \
+ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \
+ END { for (dir in files) print dir, files[dir] }'
+am__base_list = \
+ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \
+ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g'
+am__uninstall_files_from_dir = { \
+ test -z "$$files" \
+ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \
+ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \
+ $(am__cd) "$$dir" && rm -f $$files; }; \
+ }
+DATA = $(hdf_java_DATA)
RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \
distclean-recursive maintainer-clean-recursive
am__recursive_targets = \
@@ -142,6 +190,7 @@ am__define_uniq_tagged_files = \
done | $(am__uniquify_input)`
ETAGS = etags
CTAGS = ctags
+DIST_SUBDIRS = $(SUBDIRS)
DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
am__relativize = \
dir0=`pwd`; \
@@ -210,12 +259,27 @@ FFLAGS = @FFLAGS@
FGREP = @FGREP@
FLIBS = @FLIBS@
GREP = @GREP@
+H4_CLASSPATH = @H4_CLASSPATH@
+H4_JAVACFLAGS = @H4_JAVACFLAGS@
+H4_JAVAFLAGS = @H4_JAVAFLAGS@
H4_VERSION = @H4_VERSION@
+HDF_JAVA = @HDF_JAVA@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
INSTALL_PROGRAM = @INSTALL_PROGRAM@
INSTALL_SCRIPT = @INSTALL_SCRIPT@
INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+JAR = @JAR@
+JAVA = @JAVA@
+JAVAC = @JAVAC@
+JAVACFLAGS = @JAVACFLAGS@
+JAVADOC = @JAVADOC@
+JAVAFLAGS = @JAVAFLAGS@
+JAVAPREFIX = @JAVAPREFIX@
+JAVA_JUNIT = @JAVA_JUNIT@
+JAVA_PATH_NAME = @JAVA_PATH_NAME@
+JNIFLAGS = @JNIFLAGS@
+JUNIT = @JUNIT@
LD = @LD@
LDFLAGS = @LDFLAGS@
LEX = @LEX@
@@ -258,11 +322,14 @@ STRIP = @STRIP@
SZIP_HAS_ENCODER = @SZIP_HAS_ENCODER@
SZIP_INFO = @SZIP_INFO@
TBL = @TBL@
+TESTS_JUNIT = @TESTS_JUNIT@
TEST_FORTRAN_NETCDF = @TEST_FORTRAN_NETCDF@
UNAME_INFO = @UNAME_INFO@
USE_COMP_SZIP = @USE_COMP_SZIP@
+UUDECODE = @UUDECODE@
VERSION = @VERSION@
YACC = @YACC@
+_ACJNI_JAVAC = @_ACJNI_JAVAC@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
@@ -336,12 +403,44 @@ ACLOCAL_AMFLAGS = "-I m4"
# .chkexe files are used to mark tests that have run successfully.
# .chklog files are output from those tests.
CHECK_CLEANFILES = *.chkexe *.chklog
- at HDF_BUILD_FORTRAN_FALSE@FORTRAN_DIR =
- at HDF_BUILD_FORTRAN_TRUE@FORTRAN_DIR = fortran
-# src folder in root Makefile, build other folders now
-SUBDIRS = src $(FORTRAN_DIR) test util
-DIST_SUBDIRS = src fortran test util examples
+# Mark this directory as part of the JNI API
+JAVA_API = yes
+SUBDIRS = jni
+JAVAROOT = .classes
+jarfile = jar$(PACKAGE_TARNAME)-$(PACKAGE_VERSION).jar
+hdf_javadir = $(libdir)
+pkgpath = hdf/hdflib
+CLASSPATH_ENV = CLASSPATH=.:$(top_srcdir)/java/lib/slf4j-api-1.7.5.jar:$$CLASSPATH
+AM_JAVACFLAGS = $(H4_JAVACFLAGS) -deprecation
+hdf_java_JAVA = \
+ ${pkgpath}/HDFException.java \
+ ${pkgpath}/HDFJavaException.java \
+ ${pkgpath}/HDFArray.java \
+ ${pkgpath}/HDFChunkInfo.java \
+ ${pkgpath}/HDFCompInfo.java \
+ ${pkgpath}/HDFConstants.java \
+ ${pkgpath}/HDFDeflateCompInfo.java \
+ ${pkgpath}/HDFIMCOMPCompInfo.java \
+ ${pkgpath}/HDFJPEGCompInfo.java \
+ ${pkgpath}/HDFLibrary.java \
+ ${pkgpath}/HDFLibraryException.java \
+ ${pkgpath}/HDFNativeData.java \
+ ${pkgpath}/HDFNewCompInfo.java \
+ ${pkgpath}/HDFNBITChunkInfo.java \
+ ${pkgpath}/HDFNBITCompInfo.java \
+ ${pkgpath}/HDFNotImplementedException.java \
+ ${pkgpath}/HDFOldCompInfo.java \
+ ${pkgpath}/HDFOldRLECompInfo.java \
+ ${pkgpath}/HDFRLECompInfo.java \
+ ${pkgpath}/HDFSKPHUFFCompInfo.java \
+ ${pkgpath}/HDFSZIPCompInfo.java
+
+hdf_java_DATA = $(jarfile)
+WINDOWTITLE = 'HDF Java'
+DOCTITLE = '<h1>HDF Java Wrapper</h1>'
+SRCDIR = '$(pkgpath)'
+CLEANFILES = classhdf_java.stamp $(jarfile) $(JAVAROOT)/$(pkgpath)/*.class
all: all-recursive
.SUFFIXES:
@@ -354,9 +453,9 @@ $(srcdir)/Makefile.in: @MAINTAINER_MODE_TRUE@ $(srcdir)/Makefile.am $(top_srcdir
exit 1;; \
esac; \
done; \
- echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign hdf/Makefile'; \
+ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign java/src/Makefile'; \
$(am__cd) $(top_srcdir) && \
- $(AUTOMAKE) --foreign hdf/Makefile
+ $(AUTOMAKE) --foreign java/src/Makefile
.PRECIOUS: Makefile
Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
@case '$?' in \
@@ -383,6 +482,56 @@ mostlyclean-libtool:
clean-libtool:
-rm -rf .libs _libs
+classhdf_java.stamp: $(am__java_sources)
+ @list1='$?'; list2=; if test -n "$$list1"; then \
+ for p in $$list1; do \
+ if test -f $$p; then d=; else d="$(srcdir)/"; fi; \
+ list2="$$list2 $$d$$p"; \
+ done; \
+ echo '$(CLASSPATH_ENV) $(JAVAC) -d $(JAVAROOT) $(AM_JAVACFLAGS) $(JAVACFLAGS) '"$$list2"; \
+ $(CLASSPATH_ENV) $(JAVAC) -d $(JAVAROOT) $(AM_JAVACFLAGS) $(JAVACFLAGS) $$list2; \
+ else :; fi
+ echo timestamp > $@
+install-hdf_javaJAVA: classhdf_java.stamp
+ @$(NORMAL_INSTALL)
+ @test -n "$(hdf_java_JAVA)" && test -n "$(hdf_javadir)" || exit 0; \
+ echo " $(MKDIR_P) '$(DESTDIR)$(hdf_javadir)'"; \
+ $(MKDIR_P) "$(DESTDIR)$(hdf_javadir)"; \
+ set x *.class; shift; test "$$1" != "*.class" || exit 0; \
+ echo " $(INSTALL_DATA)" "$$@" "'$(DESTDIR)$(hdf_javadir)/$$p'"; \
+ $(INSTALL_DATA) "$$@" "$(DESTDIR)$(hdf_javadir)"
+
+uninstall-hdf_javaJAVA:
+ @$(NORMAL_UNINSTALL)
+ @test -n "$(hdf_java_JAVA)" && test -n "$(hdf_javadir)" || exit 0; \
+ set x *.class; shift; test "$$1" != "*.class" || exit 0; \
+ echo " ( cd '$(DESTDIR)$(hdf_javadir)' && rm -f" "$$@" ")"; \
+ cd "$(DESTDIR)$(hdf_javadir)" && rm -f "$$@"
+
+clean-hdf_javaJAVA:
+ -rm -f *.class classhdf_java.stamp
+install-hdf_javaDATA: $(hdf_java_DATA)
+ @$(NORMAL_INSTALL)
+ @list='$(hdf_java_DATA)'; test -n "$(hdf_javadir)" || list=; \
+ if test -n "$$list"; then \
+ echo " $(MKDIR_P) '$(DESTDIR)$(hdf_javadir)'"; \
+ $(MKDIR_P) "$(DESTDIR)$(hdf_javadir)" || exit 1; \
+ fi; \
+ for p in $$list; do \
+ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
+ echo "$$d$$p"; \
+ done | $(am__base_list) | \
+ while read files; do \
+ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(hdf_javadir)'"; \
+ $(INSTALL_DATA) $$files "$(DESTDIR)$(hdf_javadir)" || exit $$?; \
+ done
+
+uninstall-hdf_javaDATA:
+ @$(NORMAL_UNINSTALL)
+ @list='$(hdf_java_DATA)'; test -n "$(hdf_javadir)" || list=; \
+ files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
+ dir='$(DESTDIR)$(hdf_javadir)'; $(am__uninstall_files_from_dir)
+
# This directory's subdirectories are mostly independent; you can cd
# into them and run 'make' without going through this Makefile.
# To change the values of 'make' variables: instead of editing Makefiles,
@@ -539,9 +688,12 @@ distdir: $(DISTFILES)
done
check-am: all-am
check: check-recursive
-all-am: Makefile
+all-am: Makefile classhdf_java.stamp $(DATA)
installdirs: installdirs-recursive
installdirs-am:
+ for dir in "$(DESTDIR)$(hdf_javadir)" "$(DESTDIR)$(hdf_javadir)"; do \
+ test -z "$$dir" || $(MKDIR_P) "$$dir"; \
+ done
install: install-recursive
install-exec: install-exec-recursive
install-data: install-data-recursive
@@ -564,6 +716,7 @@ install-strip:
mostlyclean-generic:
clean-generic:
+ -test -z "$(CLEANFILES)" || rm -f $(CLEANFILES)
distclean-generic:
-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
@@ -572,9 +725,8 @@ distclean-generic:
maintainer-clean-generic:
@echo "This command is intended for maintainers to use"
@echo "it deletes files that may require special tools to rebuild."
-clean: clean-recursive
-
-clean-am: clean-generic clean-libtool mostlyclean-am
+clean-am: clean-generic clean-hdf_javaJAVA clean-libtool \
+ mostlyclean-am
distclean: distclean-recursive
-rm -f Makefile
@@ -592,7 +744,7 @@ info: info-recursive
info-am:
-install-data-am:
+install-data-am: install-hdf_javaDATA install-hdf_javaJAVA
install-dvi: install-dvi-recursive
@@ -618,7 +770,7 @@ install-ps: install-ps-recursive
install-ps-am:
-installcheck-am: installcheck-local
+installcheck-am:
maintainer-clean: maintainer-clean-recursive
-rm -f Makefile
@@ -636,41 +788,46 @@ ps: ps-recursive
ps-am:
-uninstall-am:
+uninstall-am: uninstall-hdf_javaDATA uninstall-hdf_javaJAVA
.MAKE: $(am__recursive_targets) install-am install-strip
.PHONY: $(am__recursive_targets) CTAGS GTAGS TAGS all all-am check \
- check-am clean clean-generic clean-libtool cscopelist-am ctags \
- ctags-am distclean distclean-generic distclean-libtool \
- distclean-tags distdir dvi dvi-am html html-am info info-am \
- install install-am install-data install-data-am install-dvi \
- install-dvi-am install-exec install-exec-am install-html \
- install-html-am install-info install-info-am install-man \
- install-pdf install-pdf-am install-ps install-ps-am \
- install-strip installcheck installcheck-am installcheck-local \
+ check-am clean clean-generic clean-hdf_javaJAVA clean-libtool \
+ cscopelist-am ctags ctags-am distclean distclean-generic \
+ distclean-libtool distclean-tags distdir dvi dvi-am html \
+ html-am info info-am install install-am install-data \
+ install-data-am install-dvi install-dvi-am install-exec \
+ install-exec-am install-hdf_javaDATA install-hdf_javaJAVA \
+ install-html install-html-am install-info install-info-am \
+ install-man install-pdf install-pdf-am install-ps \
+ install-ps-am install-strip installcheck installcheck-am \
installdirs installdirs-am maintainer-clean \
maintainer-clean-generic mostlyclean mostlyclean-generic \
mostlyclean-libtool pdf pdf-am ps ps-am tags tags-am uninstall \
- uninstall-am
+ uninstall-am uninstall-hdf_javaDATA uninstall-hdf_javaJAVA
-installcheck-local:
- @(cd examples && $(MAKE) $(AM_MAKEFLAGS) $@) || exit 1;
+classes:
+ test -d $(@D)/$(JAVAROOT) || $(MKDIR_P) $(@D)/$(JAVAROOT)
-# 'make install-all' also installs examples
-install-all:
- @$(MAKE) $(AM_MAKEFLAGS) install
- @$(MAKE) $(AM_MAKEFLAGS) install-examples
-uninstall-all:
- @$(MAKE) $(AM_MAKEFLAGS) uninstall
- @$(MAKE) $(AM_MAKEFLAGS) uninstall-examples
+$(jarfile): classhdf_java.stamp classes docs
+ $(JAR) cvf $@ -C $(JAVAROOT)/ $(pkgpath)
-# Install examples recursively
-install-examples uninstall-examples:
- @@SETX@; for d in examples $(FORTRAN_DIR); do \
- (cd $$d && $(MAKE) $(AM_MAKEFLAGS) $@) || exit 1; \
- done
+.PHONY: docs classes
+
+docs:
+ $(JAVADOC) -sourcepath $(srcdir) -d javadoc -use -splitIndex -windowtitle $(WINDOWTITLE) -doctitle $(DOCTITLE) -J-Xmx180m -verbose -overview $(top_srcdir)/java/src/hdf/overview.html -classpath $(CLASSPATH_ENV) hdf.hdflib
+
+clean:
+ rm -rf $(JAVAROOT)/*
+ rm -f $(jarfile)
+ rm -f classhdf_java.stamp
+
+# Clean examples when check-clean is invoked
+check-clean :: ;
+
+#include $(top_srcdir)/config/conclude.am
# Tell versions [3.59,3.63) of GNU make to not export all variables.
# Otherwise a system limit (for SysV at least) may be exceeded.
diff --git a/java/src/hdf/CMakeLists.txt b/java/src/hdf/CMakeLists.txt
new file mode 100644
index 0000000..1bfc617
--- /dev/null
+++ b/java/src/hdf/CMakeLists.txt
@@ -0,0 +1,4 @@
+cmake_minimum_required (VERSION 3.1.0)
+PROJECT (HDF4_JAVA_HDF)
+
+add_subdirectory (${HDF4_JAVA_HDF_SOURCE_DIR}/hdflib hdflib)
diff --git a/java/src/hdf/hdflib/CMakeLists.txt b/java/src/hdf/hdflib/CMakeLists.txt
new file mode 100644
index 0000000..3ddeffa
--- /dev/null
+++ b/java/src/hdf/hdflib/CMakeLists.txt
@@ -0,0 +1,74 @@
+cmake_minimum_required (VERSION 3.1.0)
+PROJECT (HDF4_JAVA_HDF_HDF4 Java)
+
+set (CMAKE_VERBOSE_MAKEFILE 1)
+
+INCLUDE_DIRECTORIES (
+ ${HDF4_JAVA_HDF_HDF4_SOURCE_DIR}
+ ${HDF4_JAVA_HDF_HDF4_BINARY_DIR}
+ ${HDF4_JAVA_LIB_DIR}
+ ${CMAKE_CURRENT_BINARY_DIR}/CMakeFiles/${HDF4_JAVA_HDF_LIB_CORENAME}.dir/hdf/hdflib
+)
+
+SET_GLOBAL_VARIABLE (HDF4_JAVA_SOURCE_PACKAGES
+ "${HDF4_JAVA_SOURCE_PACKAGES};hdf.hdflib"
+)
+
+set (HDF4_JAVA_HDF_HDF4_SRCS
+ HDFException.java
+ HDFJavaException.java
+ HDFArray.java
+ HDFChunkInfo.java
+ HDFCompInfo.java
+ HDFConstants.java
+ HDFDeflateCompInfo.java
+ HDFIMCOMPCompInfo.java
+ HDFJPEGCompInfo.java
+ HDFLibrary.java
+ HDFLibraryException.java
+ HDFNativeData.java
+ HDFNewCompInfo.java
+ HDFNBITChunkInfo.java
+ HDFNBITCompInfo.java
+ HDFNotImplementedException.java
+ HDFOldCompInfo.java
+ HDFOldRLECompInfo.java
+ HDFRLECompInfo.java
+ HDFSKPHUFFCompInfo.java
+ HDFSZIPCompInfo.java
+)
+
+set (CMAKE_JNI_TARGET TRUE)
+
+file (WRITE ${PROJECT_BINARY_DIR}/Manifest.txt
+"
+"
+)
+
+set (CMAKE_JAVA_INCLUDE_PATH "${HDF4_JAVA_LOGGING_JAR}")
+
+add_jar (${HDF4_JAVA_HDF_LIB_TARGET} OUTPUT_NAME "${HDF4_JAVA_HDF_LIB_TARGET}-${HDF4_PACKAGE_VERSION}" OUTPUT_DIR ${CMAKE_JAVA_TARGET_OUTPUT_DIR} MANIFEST ${PROJECT_BINARY_DIR}/Manifest.txt ${HDF4_JAVA_HDF_HDF4_SRCS})
+install_jar (${HDF4_JAVA_HDF_LIB_TARGET} LIBRARY DESTINATION ${HDF4_INSTALL_JAR_DIR} COMPONENT libraries)
+#if (NOT WIN32)
+# install_jni_symlink (${HDF4_JAVA_HDF_LIB_TARGET} ${HDF4_INSTALL_JAR_DIR} libraries)
+#endif (NOT WIN32)
+
+get_target_property (${HDF4_JAVA_HDF_LIB_TARGET}_JAR_FILE ${HDF4_JAVA_HDF_LIB_TARGET} JAR_FILE)
+SET_GLOBAL_VARIABLE (HDF4_JAVA_JARS_TO_EXPORT "${HDF4_JAVA_JARS_TO_EXPORT};${${HDF4_JAVA_HDF_LIB_TARGET}_JAR_FILE}")
+SET_GLOBAL_VARIABLE (HDF4_JAVA_JARS ${${HDF4_JAVA_HDF_LIB_TARGET}_JAR_FILE})
+
+add_dependencies (${HDF4_JAVA_HDF_LIB_TARGET} ${HDF4_JAVA_JNI_LIB_TARGET})
+set_target_properties (${HDF4_JAVA_HDF_LIB_TARGET} PROPERTIES FOLDER libraries/java)
+
+create_javadoc(hdf4_java_doc
+ FILES ${HDF4_JAVA_HDF_HDF4_SRCS}
+ OVERVIEW ${HDF4_JAVA_HDF_SRC_DIR}/overview.html
+ CLASSPATH ${CMAKE_JAVA_INCLUDE_PATH}
+ WINDOWTITLE "HDF Java"
+ DOCTITLE "<h1>HDF Java Wrapper</h1>"
+ INSTALLPATH ${HDF4_INSTALL_DATA_DIR}
+ AUTHOR TRUE
+ USE TRUE
+ VERSION TRUE
+)
+
diff --git a/java/src/hdf/hdflib/HDFArray.java b/java/src/hdf/hdflib/HDFArray.java
new file mode 100644
index 0000000..51f39f5
--- /dev/null
+++ b/java/src/hdf/hdflib/HDFArray.java
@@ -0,0 +1,854 @@
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+
+package hdf.hdflib;
+
+/**
+ * This is a class for handling multidimensional arrays for
+ * HDF.
+ * <p>
+ * The purpose is to allow the storage and retrieval of
+ * arbitrary array types containing scientific data.
+ * <p>
+ * The methods support the conversion of a Java array to and
+ * from a one-dimensional array of bytes suitable
+ * for I/O by the C library.
+ * <p>
+ * This class heavily uses the <a href="./hdf.hdflib.HDFNativeData.html">HDFNativeData</a>
+ * class to convert between Java and C representations.
+ */
+
+public class HDFArray {
+
+private Object _theArray = null;
+private ArrayDescriptor _desc = null;
+private byte [] _barray = null;
+
+public HDFArray(Object anArray) throws HDFException {
+
+ if (anArray == null) {
+ /* exception: null array */
+ HDFException ex =
+ new HDFJavaException("HDFArray: array is null?: ");
+ ex.printStackTrace();
+ throw(ex);
+ }
+ Class tc = anArray.getClass();
+ if (tc.isArray() == false) {
+ /* exception: not an array */
+ HDFException ex =
+ new HDFJavaException("HDFArray: not an array?: ");
+ ex.printStackTrace();
+ throw(ex);
+ }
+ _theArray = anArray;
+ _desc = new ArrayDescriptor( _theArray );
+
+ /* extra error checking -- probably not needed */
+ if (_desc == null ) {
+ HDFException ex =
+ new HDFJavaException("HDFArray: internal error: array description failed?: ");
+ throw(ex);
+ }
+}
+
+/**
+ * @return a newly allocated one-dimensional array of bytes large enough to hold
+ * the array.
+ * @exception HDFException if the allocation fails.
+ */
+
+public byte[] emptyBytes()
+throws HDFException
+{
+ byte[] b = null;
+ if ((ArrayDescriptor.dims == 1) && (ArrayDescriptor.NT == 'B')) {
+ b = (byte [])_theArray;
+ } else {
+ b = new byte[ArrayDescriptor.totalSize];
+ }
+ if (b == null) {
+ System.out.println("Error: HDFArray can't allocate bytes for array");
+ HDFException ex =
+ new HDFJavaException("HDFArray: emptyBytes: allocation failed");
+ throw(ex);
+ }
+ return (b);
+ //return (new byte[ArrayDescriptor.totalSize]);
+}
+
+/**
+ * @return the Java array of numbers converted to a one-dimensional
+ * array of bytes in the correct native order.
+ *
+ * @exception hdf.hdflib.HDFException
+ * thrown as an HDFJavaException when the object is not an array
+ */
+public byte[] byteify() throws HDFException{
+
+ if (_barray != null) {
+ return _barray;
+ }
+ if (_theArray == null) {
+ /* exception: not an array */
+ HDFException ex =
+ new HDFJavaException("HDFArray: byteify not an array?: ");
+ throw(ex);
+ }
+
+ if (ArrayDescriptor.dims == 1) {
+ /* special case */
+ if (ArrayDescriptor.NT == 'B') {
+ /* really special case! */
+ _barray = (byte [])_theArray;
+ return _barray;
+ } else {
+ try {
+ _barray = new byte[ArrayDescriptor.totalSize];
+
+ byte [] therow;
+ if (ArrayDescriptor.NT == 'I') {
+ therow = hdf.hdflib.HDFNativeData.intToByte(0,ArrayDescriptor.dimlen[1],(int [])_theArray);
+ } else if (ArrayDescriptor.NT == 'S') {
+ therow = hdf.hdflib.HDFNativeData.shortToByte(0,ArrayDescriptor.dimlen[1],(short [])_theArray);
+ } else if (ArrayDescriptor.NT == 'F') {
+ therow = hdf.hdflib.HDFNativeData.floatToByte(0,ArrayDescriptor.dimlen[1],(float [])_theArray);
+ } else if (ArrayDescriptor.NT == 'J') {
+ therow = hdf.hdflib.HDFNativeData.longToByte(0,ArrayDescriptor.dimlen[1],(long [])_theArray);
+ } else if (ArrayDescriptor.NT == 'D') {
+ therow = hdf.hdflib.HDFNativeData.doubleToByte(0,ArrayDescriptor.dimlen[1],(double [])_theArray);
+ } else if (ArrayDescriptor.NT == 'L') {
+ if (ArrayDescriptor.className.equals("java.lang.Byte")) {
+ therow = ByteObjToByte((Byte[])_theArray);
+ } else if (ArrayDescriptor.className.equals("java.lang.Integer")) {
+ therow = IntegerToByte((Integer[])_theArray);
+ } else if (ArrayDescriptor.className.equals("java.lang.Short")) {
+ therow = ShortToByte((Short[])_theArray);
+ } else if (ArrayDescriptor.className.equals("java.lang.Float")) {
+ therow = FloatObjToByte((Float[])_theArray);
+ } else if (ArrayDescriptor.className.equals("java.lang.Double")) {
+ therow = DoubleObjToByte((Double[])_theArray);
+ } else if (ArrayDescriptor.className.equals("java.lang.Long")) {
+ therow = LongObjToByte((Long[])_theArray);
+ } else {
+ HDFJavaException ex =
+ new HDFJavaException("HDFArray: unknown type of Object?");
+ throw(ex);
+ }
+ } else {
+ HDFJavaException ex =
+ new HDFJavaException("HDFArray: unknown type of Object?");
+ throw(ex);
+ }
+ System.arraycopy(therow,0,_barray,0,(ArrayDescriptor.dimlen[1] * ArrayDescriptor.NTsize));
+ return _barray;
+ } catch (OutOfMemoryError err) {
+ HDFException ex =
+ new HDFJavaException("HDFArray: byteify array too big?");
+ ex.printStackTrace();
+ throw(ex);
+ }
+ }
+ }
+
+ try {
+ _barray = new byte[ArrayDescriptor.totalSize];
+ } catch (OutOfMemoryError err) {
+ HDFException ex =
+ new HDFJavaException("HDFArray: byteify array too big?");
+ ex.printStackTrace();
+ throw(ex);
+ }
+
+
+ Object oo = _theArray;
+ int n = 0; /* the current byte */
+ int index = 0;
+ int i;
+ while ( n < ArrayDescriptor.totalSize ) {
+ oo = ArrayDescriptor.objs[0];
+ index = n / ArrayDescriptor.bytetoindex[0];
+ index %= ArrayDescriptor.dimlen[0];
+ for (i = 0 ; i < (ArrayDescriptor.dims); i++) {
+ index = n / ArrayDescriptor.bytetoindex[i];
+ index %= ArrayDescriptor.dimlen[i];
+
+ if (index == ArrayDescriptor.currentindex[i]) {
+ /* then use cached copy */
+ oo = ArrayDescriptor.objs[i];
+ } else {
+ /* check range of index */
+ if (index > (ArrayDescriptor.dimlen[i] - 1)) {
+ System.out.println("out of bounds?");
+ return null;
+ }
+ oo = java.lang.reflect.Array.get(oo,index);
+ ArrayDescriptor.currentindex[i] = index;
+ ArrayDescriptor.objs[i] = oo;
+ }
+ }
+
+ /* byte-ify */
+ byte arow[];
+ try {
+ if (ArrayDescriptor.NT == 'J') {
+ arow = hdf.hdflib.HDFNativeData.longToByte(0,ArrayDescriptor.dimlen[ArrayDescriptor.dims],(long [])ArrayDescriptor.objs[ArrayDescriptor.dims - 1]);
+ } else if (ArrayDescriptor.NT == 'I') {
+ arow = hdf.hdflib.HDFNativeData.intToByte(0,ArrayDescriptor.dimlen[ArrayDescriptor.dims],(int [])ArrayDescriptor.objs[ArrayDescriptor.dims - 1]);
+ } else if (ArrayDescriptor.NT == 'S') {
+ arow = hdf.hdflib.HDFNativeData.shortToByte(0,ArrayDescriptor.dimlen[ArrayDescriptor.dims],(short [])ArrayDescriptor.objs[ArrayDescriptor.dims - 1]);
+ } else if (ArrayDescriptor.NT == 'B') {
+ arow = (byte [])ArrayDescriptor.objs[ArrayDescriptor.dims - 1];
+ } else if (ArrayDescriptor.NT == 'F') {
+ /* 32 bit float */
+ arow = hdf.hdflib.HDFNativeData.floatToByte(0,ArrayDescriptor.dimlen[ArrayDescriptor.dims],(float [])ArrayDescriptor.objs[ArrayDescriptor.dims - 1]);
+ } else if (ArrayDescriptor.NT == 'D') {
+ /* 64 bit float */
+ arow = hdf.hdflib.HDFNativeData.doubleToByte(0,ArrayDescriptor.dimlen[ArrayDescriptor.dims],(double [])ArrayDescriptor.objs[ArrayDescriptor.dims - 1]);
+ } else if (ArrayDescriptor.NT == 'L') {
+ if (ArrayDescriptor.className.equals("java.lang.Byte")) {
+ arow = ByteObjToByte((Byte[])ArrayDescriptor.objs[ArrayDescriptor.dims - 1]);
+ } else if (ArrayDescriptor.className.equals("java.lang.Integer")) {
+ arow = IntegerToByte((Integer[])ArrayDescriptor.objs[ArrayDescriptor.dims - 1]);
+ } else if (ArrayDescriptor.className.equals("java.lang.Short")) {
+ arow = ShortToByte((Short[])ArrayDescriptor.objs[ArrayDescriptor.dims - 1]);
+ } else if (ArrayDescriptor.className.equals("java.lang.Float")) {
+ arow = FloatObjToByte((Float[])ArrayDescriptor.objs[ArrayDescriptor.dims - 1]);
+ } else if (ArrayDescriptor.className.equals("java.lang.Double")) {
+ arow = DoubleObjToByte((Double[])ArrayDescriptor.objs[ArrayDescriptor.dims - 1]);
+ } else if (ArrayDescriptor.className.equals("java.lang.Long")) {
+ arow = LongObjToByte((Long[])ArrayDescriptor.objs[ArrayDescriptor.dims - 1]);
+ } else {
+ HDFJavaException ex =
+ new HDFJavaException("HDFArray: byteify Object type not implemented?");
+ throw(ex);
+ }
+ } else {
+ HDFJavaException ex =
+ new HDFJavaException("HDFArray: byteify Object type not implemented?");
+ throw(ex);
+ }
+ System.arraycopy(arow,0,_barray,n,(ArrayDescriptor.dimlen[ArrayDescriptor.dims] * ArrayDescriptor.NTsize));
+ n += ArrayDescriptor.bytetoindex[ArrayDescriptor.dims - 1];
+ } catch (OutOfMemoryError err) {
+ HDFException ex =
+ new HDFJavaException("HDFArray: byteify array too big?");
+ ex.printStackTrace();
+ throw(ex);
+ }
+ }
+/* assert: the whole array is completed--currentindex should == len - 1 */
+
+ /* error checks */
+
+ if (n < ArrayDescriptor.totalSize) {
+ throw new java.lang.InternalError(
+ new String("HDFArray:::byteify: Panic didn't complete all input data: n= "+n+" size = "+ArrayDescriptor.totalSize));
+ }
+ for (i = 0;i < ArrayDescriptor.dims; i++) {
+ if (ArrayDescriptor.currentindex[i] != ArrayDescriptor.dimlen[i] - 1) {
+ throw new java.lang.InternalError(
+ new String("Panic didn't complete all data: currentindex["+i+"] = "+ArrayDescriptor.currentindex[i]+" (should be "+(ArrayDescriptor.dimlen[i] - 1)+" ?)"));
+ }
+ }
+ return _barray;
+}
+
+/**
+ * @param bytes a one-dimensional array of bytes
+ * @return the data converted to a Java array of the shape and type passed to the constructor.
+ *
+ * @exception hdf.hdflib.HDFException
+ * thrown as an HDFJavaException when the object is not an array
+ */
+public Object arrayify(byte[] bytes) throws HDFException {
+
+ if (_theArray == null) {
+ /* exception: not an array */
+ HDFException ex =
+ new HDFJavaException("arrayify: not an array?: ");
+ throw(ex);
+ }
+
+ if (java.lang.reflect.Array.getLength(bytes) != ArrayDescriptor.totalSize) {
+ /* exception: array not right size */
+ HDFException ex =
+ new HDFJavaException("arrayify: array is wrong size?: ");
+ throw(ex);
+ }
+ _barray = bytes; /* hope that the bytes are correct.... */
+ if (ArrayDescriptor.dims == 1) {
+ /* special case */
+ /* 2 data copies here! */
+ try {
+ if (ArrayDescriptor.NT == 'I') {
+ int [] x = hdf.hdflib.HDFNativeData.byteToInt(_barray);
+ System.arraycopy(x,0,_theArray,0,ArrayDescriptor.dimlen[1]);
+ return _theArray;
+ } else if (ArrayDescriptor.NT == 'S') {
+ short [] x = hdf.hdflib.HDFNativeData.byteToShort(_barray);
+ System.arraycopy(x,0,_theArray,0,ArrayDescriptor.dimlen[1]);
+ return _theArray;
+ } else if (ArrayDescriptor.NT == 'F') {
+ float x[] = hdf.hdflib.HDFNativeData.byteToFloat(_barray);
+ System.arraycopy(x,0,_theArray,0,ArrayDescriptor.dimlen[1]);
+ return _theArray;
+ } else if (ArrayDescriptor.NT == 'J') {
+ long x[] = hdf.hdflib.HDFNativeData.byteToLong(_barray);
+ System.arraycopy(x,0,_theArray,0,ArrayDescriptor.dimlen[1]);
+ return _theArray;
+ } else if (ArrayDescriptor.NT == 'D') {
+ double x[] = hdf.hdflib.HDFNativeData.byteToDouble(_barray);
+ System.arraycopy(x,0,_theArray,0,ArrayDescriptor.dimlen[1]);
+ return _theArray;
+ } else if (ArrayDescriptor.NT == 'B') {
+ System.arraycopy(_barray,0,_theArray,0,ArrayDescriptor.dimlen[1]);
+ return _theArray;
+ } else if (ArrayDescriptor.NT == 'L') {
+ if (ArrayDescriptor.className.equals("java.lang.Byte")) {
+ Byte I[] = ByteToByteObj(_barray);
+ System.arraycopy(I,0,_theArray,0,ArrayDescriptor.dimlen[1]);
+ return _theArray;
+ } else if (ArrayDescriptor.className.equals("java.lang.Integer")) {
+ Integer I[] = ByteToInteger(_barray);
+ System.arraycopy(I,0,_theArray,0,ArrayDescriptor.dimlen[1]);
+ return _theArray;
+ } else if (ArrayDescriptor.className.equals("java.lang.Short")) {
+ Short I[] = ByteToShort(_barray);
+ System.arraycopy(I,0,_theArray,0,ArrayDescriptor.dimlen[1]);
+ return _theArray;
+ } else if (ArrayDescriptor.className.equals("java.lang.Float")) {
+ Float I[] = ByteToFloatObj(_barray);
+ System.arraycopy(I,0,_theArray,0,ArrayDescriptor.dimlen[1]);
+ return _theArray;
+ } else if (ArrayDescriptor.className.equals("java.lang.Double")) {
+ Double I[] = ByteToDoubleObj(_barray);
+ System.arraycopy(I,0,_theArray,0,ArrayDescriptor.dimlen[1]);
+ return _theArray;
+ } else if (ArrayDescriptor.className.equals("java.lang.Long")) {
+ Long I[] = ByteToLongObj(_barray);
+ System.arraycopy(I,0,_theArray,0,ArrayDescriptor.dimlen[1]);
+ return _theArray;
+ } else {
+ HDFJavaException ex =
+ new HDFJavaException("arrayify: Object type not implemented yet...");
+ throw(ex);
+ }
+ } else {
+ HDFJavaException ex =
+ new HDFJavaException("arrayify: Object type not implemented yet...");
+ throw(ex);
+ }
+ } catch (OutOfMemoryError err) {
+ HDFException ex =
+ new HDFJavaException("HDFArray: arrayify array too big?");
+ ex.printStackTrace();
+ throw(ex);
+ }
+ }
+ /* Assert dims >= 2 */
+
+ Object oo = _theArray;
+ int n = 0; /* the current byte */
+ int index = 0;
+ int i;
+ while ( n < ArrayDescriptor.totalSize ) {
+ oo = ArrayDescriptor.objs[0];
+ index = n / ArrayDescriptor.bytetoindex[0];
+ index %= ArrayDescriptor.dimlen[0];
+ for (i = 0 ; i < (ArrayDescriptor.dims); i++) {
+ index = n / ArrayDescriptor.bytetoindex[i];
+ index %= ArrayDescriptor.dimlen[i];
+
+ if (index == ArrayDescriptor.currentindex[i]) {
+ /* then use cached copy */
+ oo = ArrayDescriptor.objs[i];
+ } else {
+ /* check range of index */
+ if (index > (ArrayDescriptor.dimlen[i] - 1)) {
+ System.out.println("out of bounds?");
+ return null;
+ }
+ oo = java.lang.reflect.Array.get((Object) oo,index);
+ ArrayDescriptor.currentindex[i] = index;
+ ArrayDescriptor.objs[i] = oo;
+ }
+ }
+
+ /* array-ify */
+ try {
+ if (ArrayDescriptor.NT == 'J') {
+ long [] arow = hdf.hdflib.HDFNativeData.byteToLong(n,ArrayDescriptor.dimlen[ArrayDescriptor.dims],_barray);
+ java.lang.reflect.Array.set(ArrayDescriptor.objs[ArrayDescriptor.dims - 2] ,
+ (ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]), (Object)arow);
+ n += ArrayDescriptor.bytetoindex[ArrayDescriptor.dims - 1];
+ ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]++;
+ } else if (ArrayDescriptor.NT == 'I') {
+ int [] arow = hdf.hdflib.HDFNativeData.byteToInt(n,ArrayDescriptor.dimlen[ArrayDescriptor.dims],_barray);
+ java.lang.reflect.Array.set(ArrayDescriptor.objs[ArrayDescriptor.dims - 2] ,
+ (ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]), (Object)arow);
+
+ n += ArrayDescriptor.bytetoindex[ArrayDescriptor.dims - 1];
+ ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]++;
+ } else if (ArrayDescriptor.NT == 'S') {
+ short [] arow = hdf.hdflib.HDFNativeData.byteToShort(n,ArrayDescriptor.dimlen[ArrayDescriptor.dims],_barray);
+ java.lang.reflect.Array.set(ArrayDescriptor.objs[ArrayDescriptor.dims - 2] ,
+ (ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]), (Object)arow);
+
+ n += ArrayDescriptor.bytetoindex[ArrayDescriptor.dims - 1];
+ ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]++;
+ } else if (ArrayDescriptor.NT == 'B') {
+ System.arraycopy( _barray, n, ArrayDescriptor.objs[ArrayDescriptor.dims - 1], 0, ArrayDescriptor.dimlen[ArrayDescriptor.dims]);
+ n += ArrayDescriptor.bytetoindex[ArrayDescriptor.dims - 1];
+ } else if (ArrayDescriptor.NT == 'F') {
+ float arow[] = hdf.hdflib.HDFNativeData.byteToFloat(n,ArrayDescriptor.dimlen[ArrayDescriptor.dims],_barray);
+ java.lang.reflect.Array.set(ArrayDescriptor.objs[ArrayDescriptor.dims - 2] ,
+ (ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]), (Object)arow);
+
+ n += ArrayDescriptor.bytetoindex[ArrayDescriptor.dims - 1];
+ ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]++;
+ } else if (ArrayDescriptor.NT == 'D') {
+ double [] arow = hdf.hdflib.HDFNativeData.byteToDouble(n,ArrayDescriptor.dimlen[ArrayDescriptor.dims],_barray);
+ java.lang.reflect.Array.set(ArrayDescriptor.objs[ArrayDescriptor.dims - 2] ,
+ (ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]), (Object)arow);
+
+ n += ArrayDescriptor.bytetoindex[ArrayDescriptor.dims - 1];
+ ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]++;
+ } else if (ArrayDescriptor.NT == 'L') {
+ if (ArrayDescriptor.className.equals("java.lang.Byte")) {
+ Byte I[] = ByteToByteObj(n,ArrayDescriptor.dimlen[ArrayDescriptor.dims],_barray);
+ java.lang.reflect.Array.set(ArrayDescriptor.objs[ArrayDescriptor.dims - 2] ,
+ (ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]),
+ (Object)I);
+
+ n += ArrayDescriptor.bytetoindex[ArrayDescriptor.dims - 1];
+ ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]++;
+ } else if (ArrayDescriptor.className.equals("java.lang.Integer")) {
+ Integer I[] = ByteToInteger(n,ArrayDescriptor.dimlen[ArrayDescriptor.dims],_barray);
+ java.lang.reflect.Array.set(ArrayDescriptor.objs[ArrayDescriptor.dims - 2] ,
+ (ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]),
+ (Object)I);
+
+ n += ArrayDescriptor.bytetoindex[ArrayDescriptor.dims - 1];
+ ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]++;
+ } else if (ArrayDescriptor.className.equals("java.lang.Short")) {
+ Short I[] = ByteToShort(n,ArrayDescriptor.dimlen[ArrayDescriptor.dims],_barray);
+ java.lang.reflect.Array.set(ArrayDescriptor.objs[ArrayDescriptor.dims - 2] ,
+ (ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]),
+ (Object)I);
+
+ n += ArrayDescriptor.bytetoindex[ArrayDescriptor.dims - 1];
+ ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]++;
+ } else if (ArrayDescriptor.className.equals("java.lang.Float")) {
+ Float I[] = ByteToFloatObj(n,ArrayDescriptor.dimlen[ArrayDescriptor.dims],_barray);
+ java.lang.reflect.Array.set(ArrayDescriptor.objs[ArrayDescriptor.dims - 2] ,
+ (ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]),
+ (Object)I);
+
+ n += ArrayDescriptor.bytetoindex[ArrayDescriptor.dims - 1];
+ ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]++;
+ } else if (ArrayDescriptor.className.equals("java.lang.Double")) {
+ Double I[] = ByteToDoubleObj(n,ArrayDescriptor.dimlen[ArrayDescriptor.dims],_barray);
+ java.lang.reflect.Array.set(ArrayDescriptor.objs[ArrayDescriptor.dims - 2] ,
+ (ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]),
+ (Object)I);
+
+ n += ArrayDescriptor.bytetoindex[ArrayDescriptor.dims - 1];
+ ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]++;
+ } else if (ArrayDescriptor.className.equals("java.lang.Long")) {
+ Long I[] = ByteToLongObj(n,ArrayDescriptor.dimlen[ArrayDescriptor.dims],_barray);
+ java.lang.reflect.Array.set(ArrayDescriptor.objs[ArrayDescriptor.dims - 2] ,
+ (ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]),
+ (Object)I);
+
+ n += ArrayDescriptor.bytetoindex[ArrayDescriptor.dims - 1];
+ ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]++;
+ } else {
+ HDFJavaException ex =
+ new HDFJavaException("HDFArray: unsupported Object type: "+ArrayDescriptor.NT);
+ throw(ex);
+ }
+ } else {
+ HDFJavaException ex =
+ new HDFJavaException("HDFArray: unsupported Object type: "+ArrayDescriptor.NT);
+ throw(ex);
+ }
+ } catch (OutOfMemoryError err) {
+ HDFException ex =
+ (HDFException)new HDFJavaException("HDFArray: arrayify array too big?");
+ ex.printStackTrace();
+ throw(ex);
+ }
+
+ }
+
+/* assert: the whole array is completed--currentindex should == len - 1 */
+
+ /* error checks */
+
+ if (n < ArrayDescriptor.totalSize) {
+ throw new java.lang.InternalError(
+ new String("HDFArray::arrayify Panic didn't complete all input data: n= "+n+" size = "+ArrayDescriptor.totalSize));
+ }
+ for (i = 0;i <= ArrayDescriptor.dims-2; i++) {
+ if (ArrayDescriptor.currentindex[i] != ArrayDescriptor.dimlen[i] - 1) {
+ throw new java.lang.InternalError(
+ new String("HDFArray::arrayify Panic didn't complete all data: currentindex["+i+"] = "+ArrayDescriptor.currentindex[i]+" (should be "+(ArrayDescriptor.dimlen[i] - 1)+"?"));
+ }
+ }
+ if (ArrayDescriptor.NT != 'B') {
+ if (ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1] != ArrayDescriptor.dimlen[ArrayDescriptor.dims - 1]) {
+ throw new java.lang.InternalError(
+ new String("HDFArray::arrayify Panic didn't complete all data: currentindex["+i+"] = "+ArrayDescriptor.currentindex[i]+" (should be "+(ArrayDescriptor.dimlen[i])+"?"));
+ }
+ } else {
+ if (ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1] != (ArrayDescriptor.dimlen[ArrayDescriptor.dims - 1] - 1)) {
+ throw new java.lang.InternalError(
+ new String("HDFArray::arrayify Panic didn't complete all data: currentindex["+i+"] = "+ArrayDescriptor.currentindex[i]+" (should be "+(ArrayDescriptor.dimlen[i] - 1)+"?"));
+ }
+ }
+
+ return _theArray;
+}
+
+private byte[] IntegerToByte( Integer in[] ) {
+ int nelems = java.lang.reflect.Array.getLength((Object)in);
+ int[] out = new int[nelems];
+
+ for (int i = 0; i < nelems; i++) {
+ out[i] = in[i].intValue();
+ }
+ return HDFNativeData.intToByte(0,nelems,out);
+}
+
+private Integer[] ByteToInteger( byte[] bin ) {
+ int in[] = (int [])HDFNativeData.byteToInt(bin);
+ int nelems = java.lang.reflect.Array.getLength((Object)in);
+ Integer[] out = new Integer[nelems];
+
+ for (int i = 0; i < nelems; i++) {
+ out[i] = new Integer(in[i]);
+ }
+ return out;
+}
+private Integer[] ByteToInteger( int start, int len, byte[] bin ) {
+ int in[] = (int [])HDFNativeData.byteToInt(start,len,bin);
+ int nelems = java.lang.reflect.Array.getLength((Object)in);
+ Integer[] out = new Integer[nelems];
+
+ for (int i = 0; i < nelems; i++) {
+ out[i] = new Integer(in[i]);
+ }
+ return out;
+}
+
+
+private byte[] ShortToByte( Short in[] ) {
+ int nelems = java.lang.reflect.Array.getLength((Object)in);
+ short[] out = new short[nelems];
+
+ for (int i = 0; i < nelems; i++) {
+ out[i] = in[i].shortValue();
+ }
+ return HDFNativeData.shortToByte(0,nelems,out);
+}
+
+private Short[] ByteToShort( byte[] bin ) {
+ short in[] = (short [])HDFNativeData.byteToShort(bin);
+ int nelems = java.lang.reflect.Array.getLength((Object)in);
+ Short[] out = new Short[nelems];
+
+ for (int i = 0; i < nelems; i++) {
+ out[i] = new Short(in[i]);
+ }
+ return out;
+}
+
+private Short[] ByteToShort( int start, int len, byte[] bin ) {
+ short in[] = (short [])HDFNativeData.byteToShort(start,len,bin);
+ int nelems = java.lang.reflect.Array.getLength((Object)in);
+ Short[] out = new Short[nelems];
+
+ for (int i = 0; i < nelems; i++) {
+ out[i] = new Short(in[i]);
+ }
+ return out;
+}
+
+private byte[] ByteObjToByte( Byte in[] ) {
+ int nelems = java.lang.reflect.Array.getLength((Object)in);
+ byte[] out = new byte[nelems];
+
+ for (int i = 0; i < nelems; i++) {
+ out[i] = in[i].byteValue();
+ }
+ return out;
+}
+
+private Byte[] ByteToByteObj( byte[] bin ) {
+ int nelems = java.lang.reflect.Array.getLength((Object)bin);
+ Byte[] out = new Byte[nelems];
+
+ for (int i = 0; i < nelems; i++) {
+ out[i] = new Byte(bin[i]);
+ }
+ return out;
+}
+
+private Byte[] ByteToByteObj( int start, int len, byte[] bin ) {
+ Byte[] out = new Byte[len];
+
+ for (int i = 0; i < len; i++) {
+ out[i] = new Byte(bin[i]);
+ }
+ return out;
+}
+
+private byte[] FloatObjToByte( Float in[] ) {
+ int nelems = java.lang.reflect.Array.getLength((Object)in);
+ float[] out = new float[nelems];
+
+ for (int i = 0; i < nelems; i++) {
+ out[i] = in[i].floatValue();
+ }
+ return HDFNativeData.floatToByte(0,nelems,out);
+}
+
+private Float[] ByteToFloatObj( byte[] bin ) {
+ float in[] = (float [])HDFNativeData.byteToFloat(bin);
+ int nelems = java.lang.reflect.Array.getLength((Object)in);
+ Float[] out = new Float[nelems];
+
+ for (int i = 0; i < nelems; i++) {
+ out[i] = new Float(in[i]);
+ }
+ return out;
+}
+
+private Float[] ByteToFloatObj( int start, int len, byte[] bin ) {
+ float in[] = (float [])HDFNativeData.byteToFloat(start,len,bin);
+ int nelems = java.lang.reflect.Array.getLength((Object)in);
+ Float[] out = new Float[nelems];
+
+ for (int i = 0; i < nelems; i++) {
+ out[i] = new Float(in[i]);
+ }
+ return out;
+}
+
+private byte[] DoubleObjToByte( Double in[] ) {
+ int nelems = java.lang.reflect.Array.getLength((Object)in);
+ double[] out = new double[nelems];
+
+ for (int i = 0; i < nelems; i++) {
+ out[i] = in[i].doubleValue();
+ }
+ return HDFNativeData.doubleToByte(0,nelems,out);
+}
+
+private Double[] ByteToDoubleObj( byte[] bin ) {
+ double in[] = (double [])HDFNativeData.byteToDouble(bin);
+ int nelems = java.lang.reflect.Array.getLength((Object)in);
+ Double[] out = new Double[nelems];
+
+ for (int i = 0; i < nelems; i++) {
+ out[i] = new Double(in[i]);
+ }
+ return out;
+}
+
+private Double[] ByteToDoubleObj( int start, int len, byte[] bin ) {
+ double in[] = (double [])HDFNativeData.byteToDouble(start,len,bin);
+ int nelems = java.lang.reflect.Array.getLength((Object)in);
+ Double[] out = new Double[nelems];
+
+ for (int i = 0; i < nelems; i++) {
+ out[i] = new Double(in[i]);
+ }
+ return out;
+}
+
+private byte[] LongObjToByte( Long in[] ) {
+ int nelems = java.lang.reflect.Array.getLength((Object)in);
+ long[] out = new long[nelems];
+
+ for (int i = 0; i < nelems; i++) {
+ out[i] = in[i].longValue();
+ }
+ return HDFNativeData.longToByte(0,nelems,out);
+}
+
+private Long[] ByteToLongObj( byte[] bin ) {
+ long in[] = (long [])HDFNativeData.byteToLong(bin);
+ int nelems = java.lang.reflect.Array.getLength((Object)in);
+ Long[] out = new Long[nelems];
+
+ for (int i = 0; i < nelems; i++) {
+ out[i] = new Long(in[i]);
+ }
+ return out;
+}
+
+private Long[] ByteToLongObj( int start, int len, byte[] bin ) {
+ long in[] = (long [])HDFNativeData.byteToLong(start,len,bin);
+ int nelems = java.lang.reflect.Array.getLength((Object)in);
+ Long[] out = new Long[nelems];
+
+ for (int i = 0; i < nelems; i++) {
+ out[i] = new Long(in[i]);
+ }
+ return out;
+}
+
+}
+
+
+/**
+ * This class is used by HDFArray to discover the shape and type of an
+ * arbitrary array.
+ */
+
+class ArrayDescriptor {
+
+ static String theType = "";
+ static Class theClass = null;
+ static int [] dimlen = null;
+ static int [] dimstart = null;
+ static int [] currentindex = null;
+ static int [] bytetoindex = null;
+ static int totalSize = 0;
+ static Object [] objs = null;
+ static char NT = ' '; /* must be B,S,I,L,F,D, else error */
+ static int NTsize = 0;
+ static int dims = 0;
+ static String className;
+
+ public ArrayDescriptor ( Object anArray ) throws HDFException {
+
+ Class tc = anArray.getClass();
+ if (tc.isArray() == false) {
+ /* exception: not an array */
+ HDFException ex =
+ (HDFException)new HDFJavaException("ArrayDescriptor: not an array?: ");
+ throw(ex);
+ }
+
+ theClass = tc;
+
+ /* parse the type descriptor to discover the
+ shape of the array */
+ String ss = tc.toString();
+ theType = ss;
+ int n = 6;
+ dims = 0;
+ char c = ' ';
+ while (n < ss.length()) {
+ c = ss.charAt(n);
+ n++;
+ if (c == '[') {
+ dims++;
+ }
+ }
+
+ String css = ss.substring(ss.lastIndexOf('[')+1);
+ Class compC = tc.getComponentType();
+ String cs = compC.toString();
+ NT = c;
+ if (NT == 'B') {
+ NTsize = 1;
+ } else if (NT == 'S') {
+ NTsize = 2;
+ } else if ((NT == 'I') || (NT == 'F')) {
+ NTsize = 4;
+ } else if ((NT == 'J') || (NT == 'D')){
+ NTsize = 8;
+ } else if (css.startsWith("Ljava.lang.Byte")) {
+ NT='L';
+ className = "java.lang.Byte";
+ NTsize = 1;
+ } else if (css.startsWith("Ljava.lang.Short")) {
+ NT='L';
+ className = "java.lang.Short";
+ NTsize = 2;
+ } else if (css.startsWith("Ljava.lang.Integer")) {
+ NT='L';
+ className = "java.lang.Integer";
+ NTsize = 4;
+ } else if (css.startsWith("Ljava.lang.Float")) {
+ NT='L';
+ className = "java.lang.Float";
+ NTsize = 4;
+ } else if (css.startsWith("Ljava.lang.Double")) {
+ NT='L';
+ className = "java.lang.Double";
+ NTsize = 8;
+ } else if (css.startsWith("Ljava.lang.Long")) {
+ NT='L';
+ className = "java.lang.Long";
+ NTsize = 8;
+ } else if (css.startsWith("Ljava.lang.String")) {
+ throw new HDFJavaException(new String("ArrayDescriptor: Error: String array not supported yet"));
+ } else {
+ /* exception: not a numeric type */
+throw new HDFJavaException(new String("Error: array is not numeric? (type is "+css+")"));
+ }
+
+ /* fill in the table */
+ dimlen = new int [dims+1];
+ dimstart = new int [dims+1];
+ currentindex = new int [dims+1];
+ bytetoindex = new int [dims+1];
+ objs = new Object [dims+1];
+
+ Object o = anArray;
+ objs[0] = o;
+ dimlen[0]= 1;
+ dimstart[0] = 0;
+ currentindex[0] = 0;
+ int i;
+ for ( i = 1; i <= dims; i++) {
+ dimlen[i]= java.lang.reflect.Array.getLength((Object) o);
+ o = java.lang.reflect.Array.get((Object) o,0);
+ objs [i] = o;
+ dimstart[i] = 0;
+ currentindex[i] = 0;
+ }
+
+ int j;
+ int dd;
+ bytetoindex[dims] = NTsize;
+ for ( i = dims; i >= 0; i--) {
+ dd = NTsize;
+ for (j = i; j < dims; j++) {
+ dd *= dimlen[j + 1];
+ }
+ bytetoindex[i] = dd;
+ }
+
+ totalSize = bytetoindex[0];
+ }
+
+ public void dumpInfo()
+ {
+ System.out.println("Type: "+theType);
+ System.out.println("Class: "+theClass);
+ System.out.println("NT: "+NT+" NTsize: "+NTsize);
+ System.out.println("Array has "+dims+" dimensions ("+totalSize+" bytes)");
+ int i;
+ for (i = 0; i <= dims; i++) {
+ Class tc = objs[i].getClass();
+ String ss = tc.toString();
+ System.out.println(i+": start "+dimstart[i]+": len "+dimlen[i]+" current "+currentindex[i]+" bytetoindex "+bytetoindex[i]+" object "+objs[i]+" otype "+ss);
+ }
+ }
+}
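
A minimal sketch of how the new HDFArray class is intended to be used: a Java array is wrapped, flattened into a native-order byte array for the C library, and rebuilt from bytes on the way back. Only methods defined in HDFArray.java above are called; the wrapper class name is illustrative and the HDFLibrary JNI calls that would normally surround this are omitted.

import hdf.hdflib.HDFArray;
import hdf.hdflib.HDFException;

public class HDFArraySketch {                       // class name is illustrative only
    public static void main(String[] args) throws HDFException {
        int[][] data = new int[4][3];               // any rectangular numeric array
        HDFArray helper = new HDFArray(data);       // records shape and element type
        byte[] wire = helper.byteify();             // Java array -> native-order bytes
        byte[] buf  = helper.emptyBytes();          // buffer sized for a C-side read
        Object back = helper.arrayify(wire);        // bytes -> Java array, same shape
        System.out.println(buf.length + " bytes, " + ((int[][]) back).length + " rows");
    }
}
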
diff --git a/java/src/hdf/hdflib/HDFChunkInfo.java b/java/src/hdf/hdflib/HDFChunkInfo.java
new file mode 100644
index 0000000..c8e7466
--- /dev/null
+++ b/java/src/hdf/hdflib/HDFChunkInfo.java
@@ -0,0 +1,52 @@
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+
+package hdf.hdflib;
+
+/**
+ * <p>
+ * This is a generic class to represent the HDF chunk_info
+ * union, which contains parameters for the different
+ * chunking schemes.
+ * <p>
+ * The variant parameters are expressed as sub-classes of this
+ * class.
+ */
+
+
+public class HDFChunkInfo {
+ public int ctype;
+ public int[] chunk_lengths = new int[HDFConstants.MAX_VAR_DIMS];
+ public int comp_type = HDFConstants.COMP_CODE_NONE;
+ public HDFCompInfo cinfo = null;
+
+ public HDFChunkInfo() {
+ ctype = HDFConstants.HDF_NONE;
+ }
+
+ public HDFChunkInfo( int[] cl, int ct, HDFCompInfo ci ) {
+ if (ct == HDFConstants.COMP_CODE_NONE) {
+ ctype = HDFConstants.HDF_CHUNK;
+ } else {
+ ctype = HDFConstants.HDF_COMP | HDFConstants.HDF_CHUNK;
+ }
+ chunk_lengths = cl;
+ comp_type = ct;
+ cinfo = ci;
+ }
+
+ public HDFChunkInfo(int[] cl) {
+ ctype = HDFConstants.HDF_CHUNK;
+ chunk_lengths = cl;
+ }
+}
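
A short sketch of the difference between the constructors added above, using only classes and constants introduced by this commit; the chunk lengths are arbitrary example values and the wrapper class name is hypothetical.

import hdf.hdflib.HDFChunkInfo;
import hdf.hdflib.HDFCompInfo;
import hdf.hdflib.HDFConstants;

public class ChunkInfoSketch {
    public static void main(String[] args) {
        int[] chunkDims = { 64, 64 };               // example chunk lengths

        // Plain chunking: ctype is HDF_CHUNK.
        HDFChunkInfo plain = new HDFChunkInfo(chunkDims);

        // The three-argument form with COMP_CODE_NONE also keeps ctype at
        // HDF_CHUNK; any other comp_type sets HDF_COMP | HDF_CHUNK instead.
        HDFChunkInfo viaUnion = new HDFChunkInfo(chunkDims,
                HDFConstants.COMP_CODE_NONE, new HDFCompInfo());

        System.out.println(plain.ctype + " == " + viaUnion.ctype);
    }
}
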
diff --git a/java/src/hdf/hdflib/HDFCompInfo.java b/java/src/hdf/hdflib/HDFCompInfo.java
new file mode 100644
index 0000000..0081d3c
--- /dev/null
+++ b/java/src/hdf/hdflib/HDFCompInfo.java
@@ -0,0 +1,33 @@
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+
+package hdf.hdflib;
+
+/**
+ * <p>
+ * This class is a container for the parameters to the HDF
+ * comp_info union.
+ * <p>
+ * The parameters for different compression schemes are
+ * represented as sub-classes of this class.
+ */
+
+
+public class HDFCompInfo {
+ public int ctype;
+ public HDFCompInfo() {
+ ctype = HDFConstants.COMP_CODE_NONE;
+ }
+}
+
+
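
To illustrate the "parameters as sub-classes" pattern described in the comment above, a hypothetical scheme-specific subclass could look like the following; the class and field names are invented for this sketch and are not the actual classes added elsewhere in this commit (e.g. HDFDeflateCompInfo).

import hdf.hdflib.HDFCompInfo;
import hdf.hdflib.HDFConstants;

// Illustrative only: shows how a compression scheme carries its own
// parameters while reusing the ctype tag inherited from HDFCompInfo.
class ExampleDeflateInfo extends HDFCompInfo {
    public int level;                               // invented field for this sketch

    public ExampleDeflateInfo(int level) {
        ctype = HDFConstants.COMP_CODE_DEFLATE;     // select the union variant
        this.level = level;
    }
}
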
diff --git a/java/src/hdf/hdflib/HDFConstants.java b/java/src/hdf/hdflib/HDFConstants.java
new file mode 100644
index 0000000..60d66ef
--- /dev/null
+++ b/java/src/hdf/hdflib/HDFConstants.java
@@ -0,0 +1,419 @@
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+
+package hdf.hdflib;
+
+/**
+ * <p>
+ * This class defines the values of constants defined
+ * by the HDF 4.2 API.
+ */
+public class HDFConstants
+{
+ /** FAIL */
+ public static final int FAIL = -1;
+
+ // file access code definitions
+ public static final int DFACC_READ = 1;
+ public static final int DFACC_WRITE= 2;
+ public static final int DFACC_RDWR = 3;
+ public static final int DFACC_CREATE=4;
+ public static final int DFACC_RDONLY=DFACC_READ;
+ public static final int DFACC_DEFAULT=000;
+ public static final int DFACC_SERIAL=001;
+ public static final int DFACC_PARALLEL=011;
+
+ // annotation type in HDF
+ public static final int AN_DATA_LABEL = 0;
+ public static final int AN_DATA_DESC = AN_DATA_LABEL + 1;
+ public static final int AN_FILE_LABEL = AN_DATA_LABEL + 2;
+ public static final int AN_FILE_DESC = AN_DATA_LABEL + 3;
+
+ // HDF Tag Definitions
+
+ public static final int DFREF_WILDCARD = 0;
+ public static final int DFTAG_WILDCARD = 0;
+
+ public static final int DFREF_NONE = 0; // used by mfhdf/libsrc/putget.c
+
+ // tags and refs
+ public static final int DFTAG_NULL = 1;
+ public static final int DFTAG_LINKED = 20; // linked-block special element
+ public static final int DFTAG_VERSION = 30;
+ public static final int DFTAG_COMPRESSED = 40; // compressed special element
+ public static final int DFTAG_VLINKED = 50; // variable-len linked-block header
+ public static final int DFTAG_VLINKED_DATA = 51; // variable-len linked-block data
+ public static final int DFTAG_CHUNKED = 60; // chunked special element header
+ public static final int DFTAG_CHUNK = 61; // chunk element
+
+ // utility set
+ public static final int DFTAG_FID = 100; // File identifier
+ public static final int DFTAG_FD = 101; // File description
+ public static final int DFTAG_TID = 102; // Tag identifier
+ public static final int DFTAG_TD = 103; // Tag descriptor
+ public static final int DFTAG_DIL = 104; // data identifier label
+ public static final int DFTAG_DIA = 105; // data identifier annotation
+ public static final int DFTAG_NT = 106; // number type
+ public static final int DFTAG_MT = 107; // machine type
+ public static final int DFTAG_FREE = 108; // free space in the file
+
+ // raster-8 set
+ public static final int DFTAG_ID8 = 200; // 8-bit Image dimension
+ public static final int DFTAG_IP8 = 201; // 8-bit Image palette
+ public static final int DFTAG_RI8 = 202; // Raster-8 image
+ public static final int DFTAG_CI8 = 203; // RLE compressed 8-bit image
+ public static final int DFTAG_II8 = 204; // IMCOMP compressed 8-bit image
+
+ // Raster Image set
+ public static final int DFTAG_ID = 300; // Image DimRec
+ public static final int DFTAG_LUT = 301; // Image Palette
+ public static final int DFTAG_RI = 302; // Raster Image
+ public static final int DFTAG_CI = 303; // Compressed Image
+ public static final int DFTAG_NRI = 304; // New-format Raster Image
+
+ public static final int DFTAG_RIG = 306; // Raster Image Group
+ public static final int DFTAG_LD = 307; // Palette DimRec
+ public static final int DFTAG_MD = 308; // Matte DimRec
+ public static final int DFTAG_MA = 309; // Matte Data
+ public static final int DFTAG_CCN = 310; // color correction
+ public static final int DFTAG_CFM = 311; // color format
+ public static final int DFTAG_AR = 312; // aspect ratio
+
+ public static final int DFTAG_DRAW = 400; // Draw these images in sequence
+ public static final int DFTAG_RUN = 401; // run this as a program/script
+
+ public static final int DFTAG_XYP = 500; // x-y position
+ public static final int DFTAG_MTO = 501; // machine-type override
+
+ // Tektronix
+ public static final int DFTAG_T14 = 602; // TEK 4014 data
+ public static final int DFTAG_T105 = 603; // TEK 4105 data
+
+ // Scientific Data set
+ // Objects of tag 721 are never actually written to the file. The tag is
+ // needed to make it easier to mix DFSD and SD style objects in the
+ // same file.
+
+ public static final int DFTAG_SDG = 700; // Scientific Data Group
+ public static final int DFTAG_SDD = 701; // Scientific Data DimRec
+ public static final int DFTAG_SD = 702; // Scientific Data
+ public static final int DFTAG_SDS = 703; // Scales
+ public static final int DFTAG_SDL = 704; // Labels
+ public static final int DFTAG_SDU = 705; // Units
+ public static final int DFTAG_SDF = 706; // Formats
+ public static final int DFTAG_SDM = 707; // Max/Min
+ public static final int DFTAG_SDC = 708; // Coord sys
+ public static final int DFTAG_SDT = 709; // Transpose
+ public static final int DFTAG_SDLNK = 710; // Links related to the dataset
+ public static final int DFTAG_NDG = 720; // Numeric Data Group
+ public static final int DFTAG_CAL = 731; // Calibration information
+ public static final int DFTAG_FV = 732; // Fill Value information
+ public static final int DFTAG_BREQ = 799; // Beginning of required tags
+ public static final int DFTAG_SDRAG = 781; // List of ragged array line lengths
+ public static final int DFTAG_EREQ = 780; // Current end of the range
+
+ // VSets
+ public static final int DFTAG_VG = 1965; // Vgroup
+ public static final int DFTAG_VH = 1962; // Vdata Header
+ public static final int DFTAG_VS = 1963; // Vdata Storage
+
+ // compression schemes
+ public static final int DFTAG_RLE = 11; // run length encoding
+ public static final int DFTAG_IMC = 12; // IMCOMP compression alias
+ public static final int DFTAG_IMCOMP = 12; // IMCOMP compression
+ public static final int DFTAG_JPEG = 13; // JPEG compression (24-bit data)
+ public static final int DFTAG_GREYJPEG = 14; // JPEG compression (8-bit data)
+ public static final int DFTAG_JPEG5 = 15; // JPEG compression (24-bit data)
+ public static final int DFTAG_GREYJPEG5 = 16; // JPEG compression (8-bit data)
+
+ /** pixel interlacing scheme */
+ public static final int MFGR_INTERLACE_PIXEL = 0;
+
+ /** line interlacing scheme */
+ public static final int MFGR_INTERLACE_LINE = MFGR_INTERLACE_PIXEL +1;
+
+ /** component interlacing scheme */
+ public static final int MFGR_INTERLACE_COMPONENT = MFGR_INTERLACE_PIXEL +2;
+
+ /** interlacing supported by the vset.*/
+ public static final int FULL_INTERLACE = 0;
+ public static final int NO_INTERLACE = 1;
+
+ /** unsigned char */
+ public static final int DFNT_UCHAR8 = 3;
+ public static final int DFNT_UCHAR = 3;
+
+ /** char */
+ public static final int DFNT_CHAR8 = 4;
+ public static final int DFNT_CHAR = 4;
+
+ /** Not supported by HDF */
+ public static final int DFNT_CHAR16 = 42;
+ public static final int DFNT_UCHAR16= 43;
+
+
+ /** float */
+ public static final int DFNT_FLOAT32 = 5;
+ public static final int DFNT_FLOAT = 5 ;
+
+ /** double */
+ public static final int DFNT_FLOAT64 = 6;
+ public static final int DFNT_FLOAT128 = 7 ;
+ public static final int DFNT_DOUBLE = 6 ;
+
+ /** 8-bit integer */
+ public static final int DFNT_INT8 = 20;
+
+ /** unsigned 8-bit integer */
+ public static final int DFNT_UINT8 = 21;
+
+ /** short */
+ public static final int DFNT_INT16 = 22;
+
+ /** unsigned 16-bit integer */
+ public static final int DFNT_UINT16 = 23;
+
+ /** 32-bit integer */
+ public static final int DFNT_INT32 = 24;
+
+ /** unsigned 32-bit integer */
+ public static final int DFNT_UINT32 = 25;
+
+ /** Not supported */
+ public static final int DFNT_INT64 = 26;
+ public static final int DFNT_UINT64 = 27;
+ public static final int DFNT_INT128 = 28;
+ public static final int DFNT_UINT128 = 30;
+
+ public static final int DFNT_HDF = 0x00000000;
+ public static final int DFNT_NATIVE = 0x00001000;
+ public static final int DFNT_CUSTOM = 0x00002000;
+ public static final int DFNT_LITEND = 0x00004000;
+ public static final int DFNT_MASK = 0x00000fff;
+
+ public static final int DF_FORWARD = 1;
+ public static final int DFS_MAXLEN = 255;
+
+ public static final int COMP_NONE = 0;
+ public static final int COMP_JPEG = 2;
+ public static final int COMP_RLE = 11;
+ public static final int COMP_IMCOMP = 12;
+ public static final int COMP_CODE_NONE = 0;
+ public static final int COMP_CODE_RLE = 1;
+ public static final int COMP_CODE_NBIT = 2;
+ public static final int COMP_CODE_SKPHUFF = 3;
+ public static final int COMP_CODE_DEFLATE = 4;
+ public static final int COMP_CODE_SZIP = 5;
+ public static final int COMP_CODE_INVALID = 6;
+ public static final int COMP_CODE_JPEG = 7;
+
+ // Interlace schemes
+ public static final int DFIL_PIXEL = 0; /* Pixel Interlacing */
+ public static final int DFIL_LINE = 1; /* Scan Line Interlacing */
+ public static final int DFIL_PLANE = 2; /* Scan Plane Interlacing */
+
+ public static final int SD_UNLIMITED = 0;
+ public static final int SD_FILL = 0;
+ public static final int SD_NOFILL = 0x100;
+ public static final int SD_DIMVAL_BW_COMP = 1;
+ public static final int SD_DIMVAL_BW_INCOMP = 0;
+
+ public static final int HDF_NONE = 0x0;
+ public static final int HDF_CHUNK = 0x1;
+ public static final int HDF_COMP = 0x3;
+ public static final int HDF_NBIT = 0x5;
+ public static final int MAX_VAR_DIMS = 32;
+
+ //the names of the Vgroups created by the GR interface
+ public static final String GR_NAME = "RIG0.0";
+ public static final String RI_NAME = "RI0.0";
+ public static final String RIGATTRNAME = "RIATTR0.0N";
+ public static final String RIGATTRCLASS = "RIATTR0.0C";
+
+ // names of classes of the Vdatas/Vgroups created by the SD interface
+ public static final String HDF_ATTRIBUTE = "Attr0.0";
+ public static final String HDF_VARIABLE = "Var0.0";
+ public static final String HDF_DIMENSION = "Dim0.0";
+ public static final String HDF_UDIMENSION = "UDim0.0";
+ public static final String DIM_VALS = "DimVal0.0";
+ public static final String DIM_VALS01 = "DimVal0.1";
+ public static final String HDF_CHK_TBL = "_HDF_CHK_TBL_";
+ public static final String HDF_SDSVAR = "SDSVar";
+ public static final String HDF_CRDVAR = "CoordVar";
+
+ public static final String HDF_CDF = "CDF0.0";
+
+ // names of data object types
+ public static final String ANNOTATION = "HDF_ANNOTATION";
+ public static final String RI8 = "HDF_RI8";
+ public static final String RI24 = "HDF_RI24";
+ public static final String GR = "HDF_GR";
+ public static final String SDS = "HDF_SDS";
+ public static final String VDATA = "HDF_VDATA";
+ public static final String VGROUP = "HDF_GROUP";
+
+ // data types represented by Strings
+ public static final String UCHAR8 = "UCHAR8";
+ public static final String CHAR8 = "CHAR8";
+ public static final String UCHAR16 = "UCHAR16";
+ public static final String CHAR16 = "CHAR16";
+ public static final String FLOAT32 = "FLOAT32";
+ public static final String FLOAT64 = "FLOAT64";
+ public static final String FLOAT128 = "FLOAT128";
+ public static final String INT8 = "INT8";
+ public static final String UINT8 = "UINT8";
+ public static final String INT16 = "INT16";
+ public static final String UINT16 = "UINT16";
+ public static final String INT32 = "INT32";
+ public static final String UINT32 = "UINT32";
+ public static final String INT64 = "INT64";
+ public static final String UINT64 = "UINT64";
+ public static final String INT128 = "INT128";
+ public static final String UINT128 = "UINT128";
+
+
+ /**
+ * convert number type to string type
+ * @param type the number representing the data type
+ * @return the string representing the data type
+ */
+ public static String getType(int type)
+ {
+ if (type == HDFConstants.DFNT_UCHAR8) {
+ return HDFConstants.UCHAR8;
+ } else if (type == HDFConstants.DFNT_CHAR8) {
+ return HDFConstants.CHAR8;
+ } else if (type == HDFConstants.DFNT_UCHAR16) {
+ return HDFConstants.UCHAR16;
+ } else if (type == HDFConstants.DFNT_CHAR16) {
+ return HDFConstants.CHAR16;
+ } else if (type == HDFConstants.DFNT_FLOAT32) {
+ return HDFConstants.FLOAT32;
+ } else if (type == HDFConstants.DFNT_FLOAT64) {
+ return HDFConstants.FLOAT64;
+ } else if (type == HDFConstants.DFNT_FLOAT128) {
+ return HDFConstants.FLOAT128;
+ } else if (type == HDFConstants.DFNT_INT8) {
+ return HDFConstants.INT8;
+ } else if (type == HDFConstants.DFNT_UINT8) {
+ return HDFConstants.UINT8;
+ } else if (type == HDFConstants.DFNT_INT16) {
+ return HDFConstants.INT16;
+ } else if (type == HDFConstants.DFNT_UINT16) {
+ return HDFConstants.UINT16;
+ } else if (type == HDFConstants.DFNT_INT32) {
+ return HDFConstants.INT32;
+ } else if (type == HDFConstants.DFNT_UINT32) {
+ return HDFConstants.UINT32;
+ } else if (type == HDFConstants.DFNT_INT64) {
+ return HDFConstants.INT64;
+ } else if (type == HDFConstants.DFNT_UINT64) {
+ return HDFConstants.UINT64;
+ } else if (type == HDFConstants.DFNT_INT128) {
+ return HDFConstants.INT128;
+ } else if (type == HDFConstants.DFNT_UINT128) {
+ return HDFConstants.UINT128;
+ } else {
+ return "Undefined Data Type";
+ }
+ }
+
+ /**
+ * convert string type to number type
+ * @param type the string representing the data type
+ * @return the integer representing the data type
+ */
+ public static int getType(String type)
+ {
+ if (type.equalsIgnoreCase(HDFConstants.UCHAR8)) {
+ return HDFConstants.DFNT_UCHAR8;
+ } else if (type.equalsIgnoreCase(HDFConstants.CHAR8)) {
+ return HDFConstants.DFNT_CHAR8;
+ } else if (type.equalsIgnoreCase(HDFConstants.UCHAR16)) {
+ return HDFConstants.DFNT_UCHAR16;
+ } else if (type.equalsIgnoreCase(HDFConstants.CHAR16)) {
+ return HDFConstants.DFNT_CHAR16;
+ } else if (type.equalsIgnoreCase(HDFConstants.FLOAT32)) {
+ return HDFConstants.DFNT_FLOAT32;
+ } else if (type.equalsIgnoreCase(HDFConstants.FLOAT64)) {
+ return HDFConstants.DFNT_FLOAT64;
+ } else if (type.equalsIgnoreCase(HDFConstants.FLOAT128)) {
+ return HDFConstants.DFNT_FLOAT128;
+ } else if (type.equalsIgnoreCase(HDFConstants.INT8)) {
+ return HDFConstants.DFNT_INT8;
+ } else if (type.equalsIgnoreCase(HDFConstants.UINT8)) {
+ return HDFConstants.DFNT_UINT8;
+ } else if (type.equalsIgnoreCase(HDFConstants.INT16)) {
+ return HDFConstants.DFNT_INT16;
+ } else if (type.equalsIgnoreCase(HDFConstants.UINT16)) {
+ return HDFConstants.DFNT_UINT16;
+ } else if (type.equalsIgnoreCase(HDFConstants.INT32)) {
+ return HDFConstants.DFNT_INT32;
+ } else if (type.equalsIgnoreCase(HDFConstants.UINT32)) {
+ return HDFConstants.DFNT_UINT32;
+ } else if (type.equalsIgnoreCase(HDFConstants.INT64)) {
+ return HDFConstants.DFNT_INT64;
+ } else if (type.equalsIgnoreCase(HDFConstants.UINT64)) {
+ return HDFConstants.DFNT_UINT64;
+ } else if (type.equalsIgnoreCase(HDFConstants.INT128)) {
+ return HDFConstants.DFNT_INT128;
+ } else if (type.equalsIgnoreCase(HDFConstants.UINT128)) {
+ return HDFConstants.DFNT_UINT128;
+ } else {
+ return -1;
+ }
+ }
+
+ /**
+ * gets the size of the data type in bytes,
+ * e.g. the size of DFNT_FLOAT32 is 4
+ *
+ * @param type the number representing the data type
+ * @return the size of the data type
+ */
+ public static int getTypeSize(int type)
+ {
+ int size = 0;
+
+ switch(type)
+ {
+ case HDFConstants.DFNT_UCHAR16:
+ case HDFConstants.DFNT_CHAR16:
+ case HDFConstants.DFNT_INT16:
+ case HDFConstants.DFNT_UINT16:
+ size = 2;
+ break;
+ case HDFConstants.DFNT_FLOAT32:
+ case HDFConstants.DFNT_INT32:
+ case HDFConstants.DFNT_UINT32:
+ size = 4;
+ break;
+ case HDFConstants.DFNT_FLOAT64:
+ case HDFConstants.DFNT_INT64:
+ case HDFConstants.DFNT_UINT64:
+ size = 8;
+ break;
+ case HDFConstants.DFNT_FLOAT128:
+ case HDFConstants.DFNT_INT128:
+ case HDFConstants.DFNT_UINT128:
+ size = 16;
+ break;
+ default:
+ size = 1;
+ break;
+ }
+
+ return size;
+ }
+
+}
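
As a quick illustration of the two getType overloads and getTypeSize defined at the end of HDFConstants, a minimal sketch follows; the demo class name is illustrative and not part of the upstream sources.

    import hdf.hdflib.HDFConstants;

    // Minimal sketch: round trip between a DFNT_* code, its string name,
    // and its size in bytes, using the helpers defined above.
    public class TypeInfoDemo {
        public static void main(String[] args) {
            int type = HDFConstants.DFNT_FLOAT32;
            System.out.println(HDFConstants.getType(type));      // prints "FLOAT32"
            System.out.println(HDFConstants.getTypeSize(type));   // prints 4
            // reverse mapping from a type name back to the DFNT_* code
            System.out.println(
                HDFConstants.getType("INT16") == HDFConstants.DFNT_INT16);  // true
        }
    }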
diff --git a/java/src/hdf/hdflib/HDFDeflateCompInfo.java b/java/src/hdf/hdflib/HDFDeflateCompInfo.java
new file mode 100644
index 0000000..1730bb9
--- /dev/null
+++ b/java/src/hdf/hdflib/HDFDeflateCompInfo.java
@@ -0,0 +1,39 @@
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+
+package hdf.hdflib;
+
+/**
+ * <p>
+ * This class is a container for the parameters to the HDF
+ * DEFLATION compression algorithm.
+ * <p>
+ * In this case, the only parameter is the ``level'' of deflation.
+ */
+
+
+public class HDFDeflateCompInfo extends HDFNewCompInfo {
+
+ public int level;
+
+ public HDFDeflateCompInfo() {
+ ctype = HDFConstants.COMP_CODE_DEFLATE;
+ }
+
+ public HDFDeflateCompInfo(int l) {
+ ctype = HDFConstants.COMP_CODE_DEFLATE;
+ level = l;
+ }
+}
+
+
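
A hypothetical usage sketch for the class above: requesting DEFLATE compression on a GR image through GRsetcompress, which is declared further down in HDFLibrary.java. Here ri_id is assumed to come from GRcreate or GRselect, and the comp-info object is assumed, as the HDFLibrary Javadoc below describes, to be accepted wherever an HDFCompInfo is expected.

    // Deflation level 6 is the usual mid-range zlib-style setting (0..9 assumed).
    HDFDeflateCompInfo cinfo = new HDFDeflateCompInfo(6);
    HDFLibrary.GRsetcompress(ri_id, HDFConstants.COMP_CODE_DEFLATE, cinfo);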
diff --git a/java/src/hdf/hdflib/HDFException.java b/java/src/hdf/hdflib/HDFException.java
new file mode 100644
index 0000000..bd5ccfc
--- /dev/null
+++ b/java/src/hdf/hdflib/HDFException.java
@@ -0,0 +1,67 @@
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+
+package hdf.hdflib;
+
+/**
+ * <p>
+ * The class HDFException returns errors from the HDF
+ * library.
+ * <p>
+ * Two sub-classes of HDFException are defined:
+ * <ol>
+ * <li>
+ * HDFLibraryException -- errors raised by the HDF library code
+ * <li>
+ * HDFJavaException -- errors raised by the HDF Java wrapper code
+ * </ol>
+ * <p>
+ * These exceptions will be sub-classed to represent specific
+ * error conditions, as needed.
+ * <p>
+ * The only specific exception currently defined is
+ * HDFNotImplementedException, indicating a function that is part
+ * of the HDF API, but which cannot or will not be implemented
+ * for Java.
+ */
+public class HDFException extends Exception
+{
+ static public final String OutOfMemoryMessage="ERROR: HDF Library: Out of memory";
+ static public final String HDFExceptionMessage="ERROR: HDF Library Error";
+ static public final String HDFMessage="ERROR: Unknown HDF Error";
+
+ protected int HDFerror;
+ protected String msg;
+
+ public HDFException() {
+ super();
+ HDFerror = 0;
+ msg = null;
+ }
+
+ public HDFException(String s) {
+ super();
+ msg = s;
+ }
+
+ public HDFException(int err) {
+ super();
+ HDFerror = err;
+ msg = null;
+ }
+
+ @Override
+ public String getMessage() {
+ return msg;
+ }
+}
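
The intended calling pattern is for wrapper calls to be enclosed in try/catch blocks for HDFException; a minimal sketch, with an illustrative file name, assuming the fragment sits inside an ordinary method:

    long fid = -1;
    try {
        fid = HDFLibrary.Hopen("example.hdf", HDFConstants.DFACC_RDONLY);
        // ... work with the open file ...
    }
    catch (HDFException e) {
        System.err.println("HDF error: " + e.getMessage());
    }
    finally {
        if (fid >= 0) {
            try { HDFLibrary.Hclose(fid); }
            catch (HDFException e) { /* ignore failure on close */ }
        }
    }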
diff --git a/java/src/hdf/hdflib/HDFIMCOMPCompInfo.java b/java/src/hdf/hdflib/HDFIMCOMPCompInfo.java
new file mode 100644
index 0000000..52752dc
--- /dev/null
+++ b/java/src/hdf/hdflib/HDFIMCOMPCompInfo.java
@@ -0,0 +1,29 @@
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+
+package hdf.hdflib;
+
+/**
+ * <p>
+ * This class is a container for the parameters to the HDF
+ * ``Image compression'' compression class.
+ * <p>
+ * In this case, no auxiliary information is needed.
+ */
+public class HDFIMCOMPCompInfo extends HDFOldCompInfo {
+
+ public HDFIMCOMPCompInfo () {
+ ctype = HDFConstants.COMP_IMCOMP;
+ }
+
+}
diff --git a/java/src/hdf/hdflib/HDFJPEGCompInfo.java b/java/src/hdf/hdflib/HDFJPEGCompInfo.java
new file mode 100644
index 0000000..90dae6d
--- /dev/null
+++ b/java/src/hdf/hdflib/HDFJPEGCompInfo.java
@@ -0,0 +1,49 @@
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+
+package hdf.hdflib;
+
+/**
+ * <p>
+ * This class is a container for the parameters to the HDF
+ * JPEG compression class.
+ * <p>
+ * In this case, the parameters are the quality and baseline.
+ */
+
+
+public class HDFJPEGCompInfo extends HDFOldCompInfo {
+
+ /* Struct to contain information about how to compress */
+ /* or decompress a JPEG encoded 24-bit image */
+
+ public int quality; /* Quality factor for JPEG compression, should be from */
+ /* 0 (terrible) to 100 (very good) */
+
+ public int force_baseline; /* If force_baseline is set to TRUE then */
+ /* quantization tables are limited to */
+ /* 0..255 for JPEG baseline compatibility */
+ /* This is only an issue for quality */
+ /* settings below 24 */
+
+ public HDFJPEGCompInfo() {
+ ctype = HDFConstants.COMP_JPEG;
+ }
+
+ public HDFJPEGCompInfo(int qual, int fb) {
+ ctype = HDFConstants.COMP_JPEG;
+ quality = qual;
+ force_baseline = fb;
+ }
+
+}
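
A hypothetical sketch of how the quality and force_baseline parameters above are supplied. The comp-type constant passed alongside is assumed here to be COMP_JPEG, which is what the constructor itself records in ctype, and ri_id is assumed to come from GRcreate or GRselect.

    HDFJPEGCompInfo jinfo = new HDFJPEGCompInfo(75, 1);   // quality 75, force baseline
    HDFLibrary.GRsetcompress(ri_id, HDFConstants.COMP_JPEG, jinfo);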
diff --git a/java/src/hdf/hdflib/HDFJavaException.java b/java/src/hdf/hdflib/HDFJavaException.java
new file mode 100644
index 0000000..80869a1
--- /dev/null
+++ b/java/src/hdf/hdflib/HDFJavaException.java
@@ -0,0 +1,40 @@
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+
+package hdf.hdflib;
+
+/**
+ * <p>
+ * The class HDFJavaException returns errors from the Java
+ * wrapper of the HDF library.
+ * <p>
+ * These errors include Java configuration errors, security
+ * violations, and resource exhaustion.
+ */
+public class HDFJavaException extends HDFException
+{
+ String msg;
+
+ public HDFJavaException() {
+ HDFerror = 0;
+ }
+
+ public HDFJavaException(String s) {
+ msg = "HDFJavaException: " + s;
+ }
+
+ @Override
+ public String getMessage() {
+ return msg;
+ }
+}
diff --git a/java/src/hdf/hdflib/HDFLibrary.java b/java/src/hdf/hdflib/HDFLibrary.java
new file mode 100644
index 0000000..5396455
--- /dev/null
+++ b/java/src/hdf/hdflib/HDFLibrary.java
@@ -0,0 +1,3172 @@
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+
+package hdf.hdflib;
+
+import java.io.File;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * This is the Java interface for the HDF 4.2 library.
+ * <p>
+ * This code is called by Java programs to access the
+ * entry points of the HDF 4.2 library.
+ * Each routine wraps a single HDF entry point, generally with the
+ * arguments and return codes analogous to the C interface.
+ * <hr>
+ * <p>
+ * These routines use the class <a href="./hdf.hdflib.HDFArray.html">HDFArray</a>
+ * to handle arrays of arbitrary type and shape.
+ * <hr>
+ * <p>
+ * <b>Mapping of arguments for Java</b>
+ *
+ * <p>
+ * In general, arguments to the HDF Java API are straightforward
+ * translations from the 'C' API described in the HDF Reference
+ * Manual.
+ *
+ * <center>
+ * <table border=2 cellpadding=4>
+ * <caption>
+ * <b>C types to Java types</b>
+ * </caption>
+ * <tr>
+ * <td>
+ * C
+ * </td>
+ * <td>
+ * Java
+ * </td>
+ * </tr>
+ * <tr>
+ * <td>
+ * int, intn, int32, uint32
+ * </td>
+ * <td>
+ * int
+ * </td>
+ * </tr>
+ * <tr>
+ * <td>
+ * short, uint16, int16
+ * </td>
+ * <td>
+ * short
+ * </td>
+ * </tr>
+ * <tr>
+ * <td>
+ * float, float32
+ * </td>
+ * <td>
+ * float
+ * </td>
+ * </tr>
+ * <tr>
+ * <td>
+ * double, float64
+ * </td>
+ * <td>
+ * double
+ * </td>
+ * </tr>
+ * <tr>
+ * <td>
+ * char, uchar, int8, uint8
+ * </td>
+ * <td>
+ * byte
+ * </td>
+ * </tr>
+ * <tr>
+ * <td>
+ * char * (<em>i.e.</em>, string)
+ * </td>
+ * <td>
+ * java.lang.String
+ * </td>
+ * </tr>
+ * <tr>
+ * <td>
+ * void
+ * </td>
+ * <td>
+ * void
+ * </td>
+ * </tr>
+ * <tr>
+ * <td>
+ * void *, VOIDP, char * (meaning ``any'')
+ * </td>
+ * <td>
+ * Special -- see HDFArray
+ * </td>
+ * </tr>
+ * </table>
+ * </center>
+ * <center>
+ * <b>General Rules for Passing Arguments and Results</b>
+ * </center>
+ * <p>
+ * In general, arguments passed <b>IN</b> to Java are the analogous
+ * basic types, as above.
+ * The exception is for arrays, which are discussed below.
+ * <p>
+ * The <i>return value</i> of Java methods is also the analogous
+ * type, as above.
+ * A major exception to that rule is that all HDF functions that
+ * return SUCCEED/FAIL are declared <i>boolean</i> in the
+ * Java version, rather than
+ * <i>intn</i> or whatever.
+ * (Functions that return a value or else FAIL are declared
+ * the equivalent to the C function.)
+ * <p>
+ * Java does not support pass by reference of arguments, so
+ * arguments that are returned through <b>OUT</b> parameters
+ * must be wrapped in an object or array.
+ * The Java API for HDF consistently wraps arguments in
+ * arrays.
+ * <p>
+ * For instance, a function that returns two integers is
+ * declared:
+ * <pre>
+ * void HDFdummy( int32* a1, int32* a2)
+ * </pre>
+ * For the Java interface, this would be declared:
+ * <pre>
+ * public static native void HDFdummy( int args[] );
+ * </pre>
+ * where <i>a1</i> is <i>args[0]</i>
+ * and <i>a2</i> is <i>args[1]</i>.
+ * <p>
+ * All the routines where this convention is used will have
+ * specific documentation of the details, given below.
+ * <p>
+ * <b>Arrays</b>
+ * <p>
+ * HDF needs to read and write multi-dimensional arrays
+ * of many types.
+ * The HDF API is self-describing, with the data for the
+ * array passed as a block of bytes, for instance,
+ * <pre>
+ * int SDreaddata(int sdsid, int32 *start, int32 * stride,
+ * int32 *count, VOIDP data);
+ * </pre>
+ * <p>
+ * where ``VOIDP'' means that the data may be any valid numeric
+ * type, and is a contiguous block of bytes that is the data
+ * for a multi-dimensional array.
+ * <p>
+ * For Java, this is a problem, as the type of data must
+ * be declared. Furthermore, multidimensional arrays
+ * are definitely <i>not</i> laid out contiguously
+ * in memory.
+ * It would be infeasible to declare a separate routine for
+ * every combination of number type and dimensionality.
+ * For that reason, the <b>HDFArray</b> class is used to
+ * discover the type, shape, and size of the data array
+ * at run time, and to convert to and from contiguous
+ * bytes.
+ * The upshot is that the data can be passed as an ``Object'',
+ * and the Java API will translate to and from the appropriate
+ * bytes.
+ * So the function above would be declared:
+ * <pre>
+ * int SDreaddata(int sdsid, int[] start, int[] stride,
+ * int[] count, Object data);
+ * </pre>
+ * and the parameter <i>data</i> can be any multi-dimensional
+ * array of numbers, such as float[][], or int[][][].
+ * <center>
+ * <b>Compression and Chunk Information</b>
+ * </center>
+ * <p>
+ * The HDF library passes the parameters needed by compression
+ * and chunking through C structures (actually, unions).
+ * The Java interface passes these as instances of subclasses
+ * of class HDFCompInfo and HDFChunkInfo respectively.
+ *
+ * <p><b> See: </b><a href="hdf.hdflib.HDFChunkInfo.html">
+ * hdf.hdflib.HDFChunkInfo</a>,
+ * and
+ * <p><a href="hdf.hdflib.HDFCompInfo.html">
+ * hdf.hdflib.HDFCompInfo</a>.
+ * <hr>
+ */
+public class HDFLibrary implements java.io.Serializable
+{
+ /**
+ *
+ */
+ private static final long serialVersionUID = -1695429510319126910L;
+
+ public final static String HDFPATH_PROPERTY_KEY = "hdf.hdflib.HDFLibrary.hdflib";
+
+ private final static Logger log = LoggerFactory.getLogger(HDFLibrary.class);
+
+ private final static String JHI_VERSION = "3.99";
+ private static boolean isLibraryLoaded = false;
+
+ static { loadH4Lib(); }
+
+ public static void loadH4Lib()
+ {
+ if (isLibraryLoaded) // load only once
+ return;
+
+ // first try loading library via full path
+ String filename = System.getProperty(HDFPATH_PROPERTY_KEY, null);
+ if ((filename != null) && (filename.length() > 0)) {
+ File h4dll = new File(filename);
+ if (h4dll.exists() && h4dll.canRead() && h4dll.isFile()) {
+ try {
+ System.load(filename);
+ isLibraryLoaded = true;
+ }
+ catch (Throwable err) {
+ isLibraryLoaded = false;
+ }
+ finally {
+ log.info("HDF4 library: ");
+ log.debug(filename);
+ log.info((isLibraryLoaded ? "" : " NOT")
+ + " successfully loaded.");
+ }
+ }
+ else {
+ isLibraryLoaded = false;
+ throw (new UnsatisfiedLinkError("Invalid HDF4 library, "+filename));
+ }
+ }
+
+ // else load standard library
+ if (!isLibraryLoaded) {
+ String mappedName = null;
+ String s_libraryName = "hdf_java";
+ try {
+ mappedName = System.mapLibraryName(s_libraryName);
+ System.loadLibrary("hdf_java");
+ isLibraryLoaded = true;
+ }
+ catch (Throwable err) {
+ isLibraryLoaded = false;
+ }
+ finally {
+ log.info("HDF4 library: " + s_libraryName);
+ log.debug(" resolved to: " + mappedName + "; ");
+ log.info((isLibraryLoaded ? "" : " NOT")
+ + " successfully loaded from java.library.path");
+ }
+ }
+
+ try {
+ HDFLibrary.HDdont_atexit();
+ }
+ catch (HDFException e) {
+ System.exit(1);
+ }
+
+ /* Important! Exit quietly */
+ }
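
A minimal sketch of selecting a specific native library through the system property read above; the path is illustrative, and the same effect can be had on the command line with -Dhdf.hdflib.HDFLibrary.hdflib=... so the property is set before the static initializer runs.

    import hdf.hdflib.HDFException;
    import hdf.hdflib.HDFLibrary;

    public class LoadDemo {
        public static void main(String[] args) throws HDFException {
            // Set the property with a literal key; the path is purely illustrative.
            System.setProperty("hdf.hdflib.HDFLibrary.hdflib",
                               "/usr/lib/jni/libhdf_java.so");
            // The first call into HDFLibrary triggers loadH4Lib().
            int[] vers = new int[3];
            String[] verString = new String[1];
            HDFLibrary.Hgetlibversion(vers, verString);
            System.out.println("HDF " + vers[0] + "." + vers[1] + "." + vers[2]);
        }
    }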
+
+ @Deprecated
+ public static final String getJHIVersion() { return JHI_VERSION; }
+
+ public static long Hopen(String filename) throws HDFException {
+ return Hopen(filename, HDFConstants.DFACC_RDONLY, 0);
+ }
+
+ public static long Hopen(String filename, int access) throws HDFException {
+ return Hopen(filename, access, 0);
+ }
+
+ public static native long Hopen(String filename, int access, int ndds) throws HDFException;
+
+ public static native boolean Hclose(long fid) throws HDFException;
+
+ public static native int HDdont_atexit() throws HDFException;
+
+ public static native boolean Hishdf(String fileName) throws HDFException;
+
+ public static int Hnumber(long fid) throws HDFException {
+ return Hnumber(fid, HDFConstants.DFTAG_WILDCARD);
+ }
+
+ public static native int Hnumber(long fid, int tagtype) throws HDFException;
+
+ public static native int DFKNTsize(long numbertype) throws HDFException;
+
+ public static native String HDgetNTdesc(int nt) throws HDFException;
+
+ public static native boolean Hcache(long file_id, int cache_switch) throws HDFException;
+
+/* not yet implemented
+ public static native boolean Hflushdd(long file_id) throws HDFException;
+*/
+
+ /**
+ *
+ * <b>Note:</b> the version of an HDF file is not well defined,
+ * so it is not recommended that programs rely on these numbers.
+ * <p>
+ * @param file_id <b>IN</b>: long, the file descriptor returned by Hopen
+ * @param vers <b>OUT</b>: int[3], the major version, minor version,
+ * and release number of the file.
+ * @param string <b>OUT</b>: String[1], the version string
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call, but is not yet implemented.
+ *
+ * @return the major, minor, and release numbers are returned
+ * in the array of ints, and the version string is returned in string[0].
+ */
+ public static native boolean Hgetfileversion(long file_id, int[] vers,
+ String[] string) throws HDFException;
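
A sketch of the OUT-parameter convention described above: the caller allocates the arrays and the wrapper fills them in. The fragment is assumed to live in a method declared throws HDFException, and the file name is illustrative.

    long fid = HDFLibrary.Hopen("example.hdf");   // read-only by default
    int[] vers = new int[3];
    String[] fileString = new String[1];
    if (HDFLibrary.Hgetfileversion(fid, vers, fileString)) {
        System.out.println("File version " + vers[0] + "." + vers[1] + "."
                           + vers[2] + " (" + fileString[0] + ")");
    }
    HDFLibrary.Hclose(fid);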
+
+ /**
+ * @param vers <b>OUT</b>: int[3], the major version, minor version,
+ * and release number of the HDF library.
+ * @param string <b>OUT</b>: String[1], the version string
+ *
+ * @return the major, minor, and release numbers are returned
+ * in the array of ints, and the version string is returned in string[0].
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call, but is not yet implemented.
+ */
+ public static native boolean Hgetlibversion(int[] vers, String[] string) throws HDFException;
+
+ public static native boolean Hsetaccesstype(long h_id, int access_type) throws HDFException;
+
+ public static native boolean Hsync(long file_id) throws HDFException;
+
+ public static native int ANstart(long fid) throws HDFException;
+
+ public static native boolean ANend(long an_id) throws HDFException;
+
+ public static native boolean ANendaccess(long an_id) throws HDFException;
+
+ /**
+ * @param an_id <b>IN</b>: the AN interface id, returned by ANstart
+ * @param info <b>OUT</b>: int[4], n_file_label, n_file_desc,
+ * n_data_label, n_data_desc
+ *
+ * @return four integer parameters:
+ * info[0] = n_file_label, info[1] = n_file_desc,
+ * info[2] = n_data_label, info[3] = n_data_desc
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ */
+ public static native boolean ANfileinfo(long an_id, int[] info) throws HDFException;
+
+ public static native int ANselect(long an_id, int index, int anntype) throws HDFException;
+
+ public static native int ANnumann(long an_id, int anntype, short tag, short ref) throws HDFException;
+
+ public static native short ANatype2tag(int antag) throws HDFException;
+
+ public static native int ANtag2atype(short anttype) throws HDFException;
+
+ /**
+ * @param an_id <b>IN</b>: the AN interface id, returned by ANstart
+ * @param anntype <b>IN</b>: the number type, as defined in HDFConstants
+ * @param tag <b>IN</b>: the HDF tag
+ * @param ref <b>IN</b>: the HDF ref
+ * @param ann_list <b>OUT</b>: int[], an array of annotation identifiers.
+ * The array must be long enough to hold the number of annotations
+ * returned by ANnumann
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * @return an array of integers, which are the identifiers of
+ * the annotations
+ */
+ public static native int ANannlist(long an_id, int anntype, short tag, short ref,
+ int[] ann_list) throws HDFException;
+
+ public static native int ANannlen(long ann_id) throws HDFException;
+
+ /**
+ * @param ann_id <b>IN</b>: the annotation id, as returned by ANselect or ANcreate
+ * @param annbuf <b>OUT</b>: String[1], the annotation is returned as annbuf[0].
+ * @param maxlen <b>IN</b>: int, the maximum length of the string.
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * @return an annotation string: annbuf[0] = the annotation
+ */
+ public static native boolean ANreadann(long ann_id, String[] annbuf, int maxlen) throws HDFException;
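
A short sketch of reading back one annotation whose identifier was obtained from ANselect or ANannlist; ann_id is an assumption for illustration, and the fragment is assumed to live in a method declared throws HDFException.

    int len = HDFLibrary.ANannlen(ann_id);        // length of the annotation text
    String[] annbuf = new String[1];
    HDFLibrary.ANreadann(ann_id, annbuf, len + 1);
    System.out.println(annbuf[0]);
    HDFLibrary.ANendaccess(ann_id);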
+
+ public static native int ANcreate(long an_id, short tag, short ref, int type) throws HDFException;
+
+ public static native int ANcreatef(long an_id, int type) throws HDFException;
+
+ /**
+ * @param an_id <b>IN</b>: the AN interface id, returned by ANstart
+ * @param index <b>IN</b>: the index of the annotation
+ * @param type <b>IN</b>: the type of the annotation
+ * @param tagref <b>OUT</b>: short[2], the tag and ref of the annotation
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * @return the tag and ref: tagref[0] = tag, tagref[1] = ref
+ */
+ public static native int ANget_tagref(long an_id, int index, int type, short[] tagref) throws HDFException;
+
+ /**
+ * @param an_id <b>IN</b>: the AN interface id, returned by ANstart
+ * @param tagref <b>OUT</b>: short[2], the tag and ref of the annotation
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * @return the tag and ref: tagref[0] = tag, tagref[1] = ref
+ */
+ public static native boolean ANid2tagref(long an_id, short[] tagref) throws HDFException;
+
+ public static native long ANtagref2id(long an_id, short tag, short ref) throws HDFException;
+
+ public static native boolean ANwriteann(long ann_id, String label, int ann_length) throws HDFException;
+
+ public static native boolean DFPaddpal(String filename, byte[] palette) throws HDFException;
+
+ public static native boolean DFPgetpal(String filename, byte[] palette) throws HDFException;
+
+ public static native short DFPlastref() throws HDFException;
+
+ public static native int DFPnpals(String filename) throws HDFException;
+
+ public static native boolean DFPputpal(String filename, byte[] palette, int overwrite, String filemode) throws HDFException;
+
+ /**
+ * @param filename <b>IN</b>: String, the name of the HDF file
+ * @param palette <b>IN</b>: byte[] the palette
+ * @param overwrite <b>IN</b>: boolean, converted to 1 == true, 0 == false
+ * to call the HDF library
+ * @param filemode <b>IN</b>: if "a", append the palette to the file; if "w", create a new file
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * @return true on success
+ */
+ public static boolean DFPputpal(String filename, byte[] palette, boolean overwrite, String filemode) throws HDFException
+ {
+ if (overwrite) {
+ return DFPputpal(filename, palette, 1, filemode);
+ }
+ else {
+ return DFPputpal(filename, palette, 0, filemode);
+ }
+ }
+
+
+ public static native boolean DFPreadref(String filename, short ref) throws HDFException;
+
+ public static native short DFPrestart() throws HDFException;
+
+ public static native boolean DFPwriteref(String filename, short ref) throws HDFException;
+
+ public static native int GRstart(long fid) throws HDFException;
+
+
+ public static native boolean GRend(long grid) throws HDFException;
+
+ /**
+ * @param grid <b>IN</b>: the GR interface id, returned by GRstart
+ * @param args <b>OUT</b>: int[2], n_datasets and n_file_attrs
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * @return the file info: args[0] = n_datasets, args[1] = n_file_attrs
+ */
+ public static native boolean GRfileinfo(long grid, int [] args) throws HDFException;
+
+ /**
+ * @param sdsid <b>IN</b>: the SD identifier returned by SDselect
+ * @param chunk_def <b>OUT</b>: HDFChunkInfo, the chunking info
+ * @param flag <b>OUT</b>: int[1], the type of chunking
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * @return chunk_def contains information about the chunking method,
+ * flag[0] = the chunking flags
+ *
+ * <p><b>NOTE:</b>The chunking algorithm-specific information is
+ * passed in an appropriate sub-class of HDFChunkInfo.
+ */
+ public static native boolean GRgetchunkinfo(long sdsid, HDFChunkInfo chunk_def, int[] flag) throws HDFException;
+
+ public static native int GRselect(long grid, int index) throws HDFException;
+
+ public static native int GRnametoindex(long grid, String name) throws HDFException;
+
+ /**
+ * @param grid <b>IN</b>: the GR interface id, returned by GRstart
+ * @param gr_name <b>OUT</b>: name of raster image
+ * @param args <b>OUT</b>: int[4], image info:
+ * number of components in the image,
+ * data type of the image data,
+ * interlace mode of the stored image data,
+ * number of attributes assigned to the image
+ * @param dim_sizes <b>OUT</b>: int[2], dim_sizes
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ *
+ * @return the file info: gr_name[0] = the image name, args[0] = ncomp,
+ * args[1] = data_type, args[2] = interlace, args[3] = num_attrs
+ *
+ * <p><b>NOTE:</b> the parameters for the Java interface are not in
+ * the same order as the C interface.
+ */
+ public static native boolean GRgetiminfo(long grid, String[] gr_name, int[] args, int[] dim_sizes) throws HDFException;
+
+ /**
+ * @param grid <b>IN</b>: the GR interface id, returned by GRstart
+ * @param start <b>IN</b>: int[2], start
+ * @param stride <b>IN</b>: int[2], stride. If the stride parameter is set to NULL,
+ * a stride of 1 will be assumed.
+ * Strides of 0 are illegal.
+ * @param count <b>IN</b>: int[2], count
+ * @param data <b>OUT</b>: byte[], data
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * @return the data in the form of a contiguous array of bytes.
+ *
+ * <p><b>NOTE:</b> to read into a Java 2D array use the alternative
+ * routine below.
+ */
+ public static native boolean GRreadimage(long grid, int[] start, int[] stride,
+ int[] count, byte[] data) throws HDFException;
+
+ /**
+ * @param grid <b>IN</b>: the GR interface id, returned by GRstart
+ * @param start <b>IN</b>: int[2], start
+ * @param stride <b>IN</b>: int[2], stride. If the stride parameter is set to NULL,
+ * a stride of 1 will be assumed.
+ * Strides of 0 are illegal.
+ * @param count <b>IN</b>: int[2], count
+ * @param theData <b>OUT</b>: Object, a Java array of appropriate
+ * type, dimensions, and size.
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * @return the data in the Java array.
+ *
+ * <p><b>Note:</b> reads the data as bytes and converts to
+ * the Java array.
+ */
+ public static boolean GRreadimage(long grid, int[] start, int[] stride,
+ int[] count, Object theData) throws HDFException
+ {
+ byte[] data;
+ boolean rval;
+
+ HDFArray theArray = new HDFArray(theData);
+ data = theArray.emptyBytes();
+ rval = GRreadimage(grid, start, stride, count, data);
+ theData = theArray.arrayify(data);
+ return rval;
+ }
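
A sketch of reading a whole single-component 8-bit image with the Object form above; ri_id is assumed to come from GRselect, and dimensioning the Java array as [dims[1]][dims[0]] is an assumption about row/column layout rather than something stated in these sources.

    String[] name = new String[1];
    int[] info = new int[4];            // ncomp, data_type, interlace, num_attrs
    int[] dims = new int[2];            // dimension sizes of the image
    HDFLibrary.GRgetiminfo(ri_id, name, info, dims);

    int[] start  = {0, 0};
    int[] stride = {1, 1};              // stride of 1 in both dimensions
    int[] count  = {dims[0], dims[1]};  // read the full image
    byte[][] image = new byte[dims[1]][dims[0]];   // layout assumed
    HDFLibrary.GRreadimage(ri_id, start, stride, count, image);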
+
+ public static native boolean GRendaccess(long riid) throws HDFException;
+
+/*
+ "[Later]"
+ public static native int GRgetdimid(long riid, int index) throws HDFException;
+*/
+
+/*
+ "[Later]"
+ public static native boolean GRdiminfo(long dimid, char[] name, int[] diminfo[]) throws HDFException;
+*/
+
+ public static native short GRidtoref(long riid) throws HDFException;
+
+ public static native int GRreftoindex(long grid, short ref) throws HDFException;
+
+ public static native boolean GRreqlutil(long riid, int interlace) throws HDFException;
+
+ public static native boolean GRreqimageil(long rrid, int interlace) throws HDFException;
+
+ public static native int GRgetlutid(long rrid, int index) throws HDFException;
+
+ public static native int GRgetnluts(long rrid) throws HDFException;
+
+ /**
+ * @param lutid <b>IN</b>: the palette identifier returned by GRgetlutid
+ * @param args <b>OUT</b>: int[4], palette info:
+ * Number of components in the palette,
+ * Data type of the palette data,
+ * Interlace mode of the stored palette data,
+ * Number of color lookup table entries in the palette.
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * @return the palette info: args[0] = ncomp, args[1] = data_type,
+ * args[2] = interlace, args[3] = num_entries
+ *
+ */
+ public static native boolean GRgetlutinfo(long lutid, int[] args) throws HDFException;
+
+ /**
+ * @param lutid <b>IN</b>: the palette identifier returned by GRgetlutid
+ * @param data <b>OUT</b>: byte[], palette data, in bytes
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * @return the palette data: as bytes
+ *
+ * <p><b>NOTE:</b> to read into a Java 1D array use the alternative
+ * routine below.
+ */
+ public static native boolean GRreadlut(long lutid, byte[] data) throws HDFException;
+
+ /**
+ * @param lutid <b>IN</b>: the palette identifier returned by GRgetlutid
+ * @param theData <b>OUT</b>: Object, palette data, a Java array
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * @return the palette data: as a Java array
+ *
+ * <p><b>Note:</b> reads the data as bytes and converts to
+ * the Java array.
+ */
+ public static boolean GRreadlut(long lutid, Object theData) throws HDFException
+ {
+ byte[] data;
+ boolean rval;
+
+ HDFArray theArray = new HDFArray(theData);
+ data = theArray.emptyBytes();
+ rval = GRreadlut(lutid, data);
+ theData = theArray.arrayify(data);
+ return rval;
+ }
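
A sketch of reading the first palette attached to an image, sizing the byte buffer from GRgetlutinfo; ri_id is assumed to come from GRselect, and the fragment is assumed to live in a method declared throws HDFException.

    int lut_id = HDFLibrary.GRgetlutid(ri_id, 0);   // first palette of the image
    int[] lutInfo = new int[4];                     // ncomp, type, interlace, entries
    HDFLibrary.GRgetlutinfo(lut_id, lutInfo);
    // e.g. 3 components x 256 entries of DFNT_UINT8 gives 768 bytes
    byte[] palette = new byte[lutInfo[0] * lutInfo[3]
                              * HDFConstants.getTypeSize(lutInfo[1])];
    HDFLibrary.GRreadlut(lut_id, palette);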
+
+ /**
+ * @param id <b>IN</b>: the GR identifier returned by GRstart
+ * @param index <b>IN</b>: the index of the attribute
+ * @param name <b>OUT</b>: String[1], the name of the attribute
+ * @param argv <b>OUT</b>: int[2], the type and length of the
+ * attribute
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * @return the name, type, and length of the attribute:
+ * name[0] = name, argv[0] = data_type, argv[1] = length
+ */
+ public static native boolean GRattrinfo(long id, int index, String[] name, int[] argv) throws HDFException;
+
+ /**
+ * @param id <b>IN</b>: the GR identifier returned by GRstart
+ * @param index <b>IN</b>: index of the attribute for info
+ * @param data <b>OUT</b>: byte[], attribute data, in bytes
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * @return the attribute data: as bytes
+ *
+ * <p><b>NOTE:</b> to read into a Java 1D array use the alternative
+ * routine below.
+ */
+ public static native boolean GRgetattr(long id, int index, byte[] data) throws HDFException;
+
+ /**
+ * @param id <b>IN</b>: the GR identifier returned by GRstart
+ * @param index <b>IN</b>: index of the attribute for info
+ * @param theData <b>OUT</b>: Object, attribute data, a Java array
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * @return the attribute data: as a Java array
+ *
+ * <p><b>Note:</b> reads the data as bytes and converts to
+ * the Java array.
+ */
+ public static boolean GRgetattr(long id, int index, Object theData) throws HDFException
+ {
+ byte[] data;
+ boolean rval;
+
+ HDFArray theArray = new HDFArray(theData);
+ data = theArray.emptyBytes();
+ rval = GRgetattr(id, index, data);
+ theData = theArray.arrayify(data);
+ return rval;
+ }
+
+ public static native int GRfindattr(long id, String name) throws HDFException;
+
+ public static native int GRcreate(long gr_id, String name, int ncomp,
+ long data_type, int interlace_mode, int[] dim_sizes) throws HDFException;
+
+ public static native short GRluttoref(long pal_id) throws HDFException;
+
+ /**
+ * @param gr_id <b>IN</b>: the GR identifier returned by GRstart
+ * @param attr_name <b>IN</b>: the name of the attribute
+ * @param data_type <b>IN</b>: the number type of the data (should
+ * be DFNT_CHAR)
+ * @param count <b>IN</b>: the length of the data (length of 'values')
+ * @param values <b>IN</b>: the attribute to write -- a String
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * <p><b>NOTE:</b>This routine writes an attribute that is
+ * a String. Alternative methods write data of other types.
+ *
+ * @return true on success
+ */
+ public static native boolean GRsetattr(long gr_id, String attr_name,
+ long data_type, int count, String values) throws HDFException;
+
+ /**
+ * @param gr_id <b>IN</b>: the GR identifier returned by GRstart
+ * @param attr_name <b>IN</b>: the name of the attribute
+ * @param data_type <b>IN</b>: the number type of the data
+ * @param count <b>IN</b>: the length of the data (length of 'values')
+ * @param values <b>IN</b>: the attribute to write -- in an
+ * array of bytes
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * <p><b>NOTE:</b>This routine writes the attribute as an array
+ * of bytes. <b>DO NOT USE THIS TO WRITE A STRING.</b> This
+ * is intended for numeric data that has been flattened into
+ * bytes.
+ *
+ * @return true on success
+ */
+ public static native boolean GRsetattr(long gr_id, String attr_name,
+ long data_type, int count, byte[] values) throws HDFException;
+
+ /**
+ * @param gr_id <b>IN</b>: the GR identifier returned by GRstart
+ * @param attr_name <b>IN</b>: the name of the attribute
+ * @param data_type <b>IN</b>: the number type of the data
+ * @param count <b>IN</b>: the length of the data (length of 'values')
+ * @param theData <b>IN</b>: Object -- the value to be written,
+ * a Java array of numbers.
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * <p><b>NOTE:</b>This routine converts the Java array to bytes
+ * then writes it.
+ * <b>DO NOT USE THIS TO WRITE A STRING.</b>
+ *
+ * @return true on success
+ */
+ public static boolean GRsetattr(long gr_id, String attr_name,
+ long data_type, int count, Object theData) throws HDFException
+ {
+ byte[] data;
+ HDFArray theArray = new HDFArray(theData);
+ data = theArray.byteify();
+ return GRsetattr(gr_id, attr_name, data_type, count, data);
+ }
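
A sketch of the String form above, attaching a character attribute to a GR identifier; gr_id and the attribute name and value are illustrative, and using the string length as the count is an assumption.

    String units = "degrees_celsius";
    HDFLibrary.GRsetattr(gr_id, "units", HDFConstants.DFNT_CHAR8,
                         units.length(), units);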
+ /**
+ * @param sdsid <b>IN</b>: the SD identifier returned by SDselect
+ * @param chunk_def <b>IN</b>: HDFChunkInfo, the chunking info
+ * @param flags <b>IN</b>: the type of chunking
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * <p><b>NOTE:</b>The chunking algorithm-specific information is
+ * passed in an appropriate sub-class of HDFChunkInfo.
+ *
+ * @return true on success
+ */
+ public static native boolean GRsetchunk(long sdsid, HDFChunkInfo chunk_def, int flags) throws HDFException;
+
+ public static native int GRsetchunkcache(long sdsid, int maxcache, int flags) throws HDFException;
+ /**
+ * @param ri_id <b>IN</b>: the GR identifier returned by GRstart
+ * @param comp_type <b>IN</b>: the type of compression
+ * @param c_info <b>IN</b>: HDFCompInfo, the compression info
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * <p><b>NOTE:</b>The compression algorithm specific information is
+ * passed in an appropriate sub-class of HDFCompInfo.
+ *
+ * @return true on success
+ */
+ public static native boolean GRsetcompress(long ri_id, int comp_type, HDFCompInfo c_info) throws HDFException;
+
+ public static native boolean GRgetcompress(long ri_id, HDFCompInfo c_info) throws HDFException;
+
+ public static native boolean GRgetcompinfo(long ri_id, HDFCompInfo c_info) throws HDFException;
+
+ public static native boolean GRsetexternalfile(long ri_id, String filename, int offset) throws HDFException;
+
+ /**
+ * @param grid <b>IN</b>: the GR interface id, returned by GRstart
+ * @param start <b>IN</b>: int[2], start
+ * @param stride <b>IN</b>: int[2], stride. If the stride parameter is set to NULL,
+ * a stride of 1 will be assumed.
+ * Strides of 0 are illegal.
+ * @param edge <b>IN</b>: int[2], count
+ * @param data <b>IN</b>: byte[], data to be written
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * <p><b>NOTE:</b> to write from a Java 2D array use the alternative
+ * routine below.
+ *
+ * @return true on success
+ */
+
+ public static native boolean GRwriteimage(long grid, int[] start, int[] stride,
+ int[] edge, byte[] data) throws HDFException;
+
+ /**
+ * @param grid <b>IN</b>: the GR interface id, returned by GRstart
+ * @param start <b>IN</b>: int[2], start
+ * @param stride <b>IN</b>: int[2], stride. If the stride parameter is set to NULL,
+ * a stride of 1 will be assumed.
+ * Strides of 0 are illegal.
+ * @param edge <b>IN</b>: int[2], count
+ * @param theData <b>IN</b>: Object, a Java array of appropriate
+ * type, dimensions, and size.
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * <p><b>Note:</b> converts the Java array to a contiguous
+ * block of bytes appropriate for C, and then writes the bytes.
+ *
+ * @return true on success
+ */
+ public static boolean GRwriteimage(long grid, int[] start, int[] stride,
+ int[] edge, Object theData) throws HDFException
+ {
+ byte[] data;
+ HDFArray theArray = new HDFArray(theData);
+ data = theArray.byteify();
+ return GRwriteimage(grid, start, stride, edge, data);
+ }
+
+ /**
+ * @param pal_id <b>IN</b>: the palette identifier returned by GRgetlutid
+ * @param ncomp <b>IN</b>: int, number of components
+ * @param data_type <b>IN</b>: int, number type
+ * @param interlace <b>IN</b>: int, interlace
+ * @param num_entries <b>IN</b>: int, number of entries
+ * @param pal_data <b>IN</b>: byte[], palette data to be written--as bytes
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ *
+ * <p><b>NOTE:</b> to write from a Java array of numbers use
+ * the alternative routine below.
+ *
+ * @return true on success
+ */
+
+ public static native boolean GRwritelut(long pal_id, int ncomp, int data_type,
+ int interlace, int num_entries, byte[] pal_data) throws HDFException;
+
+ /**
+ * @param pal_id <b>IN</b>: the palette identifier returned by GRgetlutid
+ * @param ncomp <b>IN</b>: int, number of components
+ * @param data_type <b>IN</b>: int, number type
+ * @param interlace <b>IN</b>: int, interlace
+ * @param num_entries <b>IN</b>: int, number of entries
+ * @param theData <b>IN</b>: Object, palette data to be written, any
+ * number type.
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ *
+ * <p><b>Note:</b> converts the Java array to a contiguous
+ * block of bytes appropriate for C, and then writes the bytes.
+ *
+ * @return true on success
+ */
+ public static boolean GRwritelut(long pal_id, int ncomp, int data_type,
+ int interlace, int num_entries, Object theData) throws HDFException
+ {
+ byte[] data;
+ HDFArray theArray = new HDFArray(theData);
+ data = theArray.byteify();
+ return GRwritelut(pal_id, ncomp, data_type, interlace, num_entries, data);
+ }
+
+ /**
+ * @param sdsid <b>IN</b>: the GR interface id, returned by SDselect
+ * @param origin <b>IN</b>: int[], origin
+ * @param theData <b>OUT</b>: byte[], the data in an array of bytes
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * <p><b>NOTE:</b> to read into a Java array use the alternative
+ * routine below.
+ *
+ * @return true on success
+ */
+ public static native boolean GRreadchunk(long sdsid, int[] origin, byte[] theData) throws HDFException;
+
+ /**
+ * @param grid <b>IN</b>: the GR interface id, returned by SDselect
+ * @param origin <b>IN</b>: int[], origin
+ * @param theData <b>IN</b>: Object, a Java array of appropriate
+ * type, dimensions, and size.
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * <p><b>Note:</b> reads the data as a contiguous
+ * array of bytes and then converts it to an appropriate Java object.
+ *
+ * @return true on success
+ */
+ public static boolean GRreadchunk(long grid, int[] origin, Object theData) throws HDFException
+ {
+ byte[] data;
+ boolean rval;
+
+ HDFArray theArray = new HDFArray(theData);
+ data = theArray.emptyBytes();
+ rval = GRreadchunk(grid, origin, data);
+ theData = theArray.arrayify(data);
+ return rval;
+ }
+
+ public static native boolean HDFclose(long file_id) throws HDFException;
+
+ public static native long HDFopen(String filename, int access, short n_dds) throws HDFException;
+
+ public static native short HEvalue(int level) throws HDFException;
+
+ public static native String HEstring(int error_code) throws HDFException;
+
+ public static native void HEprint(Object stream, int level) throws HDFException;
+
+ public static native boolean HXsetcreatedir(String dir) throws HDFException;
+
+ public static native boolean HXsetdir(String dir) throws HDFException ;
+
+ public static native long SDstart(String filename, int accessmode) throws HDFException;
+
+ public static native boolean SDend(long sdid) throws HDFException;
+
+ /**
+ * @param sdid <b>IN</b>: the SD interface id, returned by SDstart
+ * @param argv <b>OUT</b>: int[2],
+ * Number of datasets in the file,
+ * Number of global attributes in the file
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * @return the file info: argv[0] = n_datasets, argv[1] = n_file_attrs
+ */
+ public static native boolean SDfileinfo(long sdid, int[] argv) throws HDFException;
+
+ public static native long SDselect(long sdid, int index) throws HDFException;
+
+ public static native int SDnametoindex(long sdid, String name) throws HDFException;
+
+ /**
+ * @param sdsid <b>IN</b>: the SD interface id, returned by SDselect
+ * @param name <b>OUT</b>: String[1], the name of the dataset
+ * @param args <b>OUT</b>: int[3], dataset info:
+ * number of dimensions (rank),
+ * data type for the data stored in the dataset,
+ * number of "netCDF-style" attributes for this dataset
+ * @param dimsizes <b>OUT</b>: int[(rank)], sizes of dimensions
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * @return the file info: name[0] = the dataset name, args[0] = rank,
+ * args[1] = data_type, args[2] = nattrs, dim_sizes[] = dimensions
+ *
+ * <p><b>NOTE:</b> the parameters for the Java interface are not in
+ * the same order as the C interface.
+ */
+ public static native boolean SDgetinfo(long sdsid, String[] name,
+ int[] dimsizes, int[] args) throws HDFException;
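
Putting SDstart, SDfileinfo, SDselect and SDgetinfo together, a minimal enumeration sketch; the file name is illustrative and the fragment is assumed to live in a method declared throws HDFException.

    long sd_id = HDFLibrary.SDstart("example.hdf", HDFConstants.DFACC_RDONLY);
    int[] fileInfo = new int[2];                   // n_datasets, n_file_attrs
    HDFLibrary.SDfileinfo(sd_id, fileInfo);
    for (int i = 0; i < fileInfo[0]; i++) {
        long sds_id = HDFLibrary.SDselect(sd_id, i);
        String[] name = new String[1];
        int[] dimsizes = new int[HDFConstants.MAX_VAR_DIMS];
        int[] info = new int[3];                   // rank, data_type, nattrs
        HDFLibrary.SDgetinfo(sds_id, name, dimsizes, info);
        System.out.println(name[0] + ": rank " + info[0]
                           + ", type " + HDFConstants.getType(info[1]));
        HDFLibrary.SDendaccess(sds_id);
    }
    HDFLibrary.SDend(sd_id);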
+
+
+ /**
+ * @param sdsid <b>IN</b>: the SD interface id, returned by SDselect
+ * @param emptySDS <b>OUT</b>: int[1], 1 if the SDS is empty, 0 if has data
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * @return true on success
+ */
+ public static native boolean SDcheckempty(long sdsid, int[] emptySDS) throws HDFException;
+
+ /**
+ * @param sdsid <b>IN</b>: the SD interface id, returned by SDselect
+ * @param start <b>IN</b>: int[], start
+ * @param stride <b>IN</b>: int[], stride
+ * @param count <b>IN</b>: int[], count
+ * @param data <b>OUT</b>: byte[], data
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * @return the data in the form of a contiguous array of bytes.
+ *
+ * <p><b>NOTE:</b> to read into a Java array use the alternative
+ * routine below.
+ */
+ public static native boolean SDreaddata(long sdsid, int[] start, int[] stride,
+ int[] count, byte[] data) throws HDFException;
+
+ /**
+ * @param sdsid <b>IN</b>: the SD interface id, returned by SDselect
+ * @param start <b>IN</b>: int[], start
+ * @param stride <b>IN</b>: int[], stride
+ * @param count <b>IN</b>: int[], count
+ * @param theData <b>OUT</b>: Object, a Java array of appropriate
+ * type, dimensions, and size.
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * @return the data in the Java array.
+ *
+ * <p><b>Note:</b> reads the data as bytes and converts to
+ * the Java array.
+ */
+ public static boolean SDreaddata(long sdsid, int[] start, int[] stride,
+ int[] count, Object theData) throws HDFException
+ {
+ boolean status = false;
+ boolean is1D = false;
+
+ Class<?> dataClass = theData.getClass();
+ if (!dataClass.isArray()) {
+ throw (new HDFJavaException("SDreaddata: data is not an array"));
+ }
+
+ String cname = dataClass.getName();
+ is1D = (cname.lastIndexOf('[') == cname.indexOf('['));
+ char dname = cname.charAt(cname.lastIndexOf("[") + 1);
+
+ if (is1D && (dname == 'B')) {
+ status = SDreaddata(sdsid, start, stride, count, (byte[])theData);
+ }
+ else if (is1D && (dname == 'S')) {
+ status = SDreaddata_short(sdsid, start, stride, count, (short[])theData);
+ }
+ else if (is1D && (dname == 'I')) {
+ status = SDreaddata_int(sdsid, start, stride, count, (int[])theData);
+ }
+ else if (is1D && (dname == 'J')) {
+ status = SDreaddata_long(sdsid, start, stride, count, (long[])theData);
+ }
+ else if (is1D && (dname == 'F')) {
+ status = SDreaddata_float(sdsid, start, stride, count, (float[])theData);
+ }
+ else if (is1D && (dname == 'D')) {
+ status = SDreaddata_double(sdsid, start, stride, count, (double[])theData);
+ }
+ else {
+ byte[] data;
+ HDFArray theArray = new HDFArray(theData);
+ data = theArray.emptyBytes();
+ status = SDreaddata(sdsid, start, stride, count, data);
+ theData = theArray.arrayify(data);
+ }
+
+ return status;
+ }
+
+ ////////////////////////////////////////////////////////////////////
+ // //
+ // New APIs for read data from library //
+ // Using SDreaddata(..., Object buf) requires function calls //
+ // theArray.emptyBytes() and theArray.arrayify( buf), which //
+ // triples the actual memory needed by the data set. //
+ // Using the following APIs solves the problem. //
+ // //
+ ////////////////////////////////////////////////////////////////////
+
+ public static native boolean SDreaddata_short(long sdsid, int[] start, int[] stride,
+ int[] count, short[] theData) throws HDFException;
+
+ public static native boolean SDreaddata_int(long sdsid, int[] start, int[] stride,
+ int[] count, int[] theData) throws HDFException;
+
+ public static native boolean SDreaddata_long(long sdsid, int[] start, int[] stride,
+ int[] count, long[] theData) throws HDFException;
+
+ public static native boolean SDreaddata_float(long sdsid, int[] start, int[] stride,
+ int[] count, float[] theData) throws HDFException;
+
+ public static native boolean SDreaddata_double(long sdsid, int[] start, int[] stride,
+ int[] count, double[] theData) throws HDFException;
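
A sketch of the direct typed entry points, which avoid the extra byte[] copies noted in the comment block above; sds_id, rows and cols are illustrative.

    int[] start  = {0, 0};
    int[] stride = {1, 1};
    int[] count  = {rows, cols};            // rows/cols of the selection
    int[] buffer = new int[rows * cols];    // flat buffer (row-major ordering assumed)
    HDFLibrary.SDreaddata_int(sds_id, start, stride, count, buffer);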
+
+ public static native boolean SDendaccess(long sdsid) throws HDFException;
+
+ public static native long SDgetdimid(long sdsid, int index) throws HDFException;
+
+ /**
+ * @param dimid <b>IN</b>: the dimension id, returned by SDgetdimid
+ * @param name <b>OUT</b>: String[1], the dimension name
+ * @param argv <b>OUT</b>: int[3], size of the name string,
+ * number type of data in the array, # attributes for the dimension
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * @return name[0] = name, argv[0] = count, argv[1] = data_type, argv[2] = nattr
+ */
+ public static native boolean SDdiminfo(long dimid, String[] name, int[] argv) throws HDFException;
+
+ public static native int SDidtoref(long sdsid) throws HDFException;
+
+ public static native int SDreftoindex(long sdid, int ref) throws HDFException;
+
+ /**
+ * @param id <b>IN</b>: id of a file, SDS, or dimension
+ * @param index <b>IN</b>: index of the attribute
+ * @param name <b>OUT</b>: String[1], the name of the attribute
+ * @param argv <b>OUT</b>: int[2], number type of the attribute,
+ * number of values in the attribute
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * @return name[0] = attr_name, argv[0] = data_type, argv[1] = count
+ */
+ public static native boolean SDattrinfo(long id, int index, String[] name, int[] argv) throws HDFException;
+
+ /**
+ * @param id <b>IN</b>: id of a file, SDS, or dimension
+ * @param index <b>IN</b>: index of the attribute
+ * @param data <b>OUT</b>: byte[], data
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * @return the data in the form of a contiguous array of bytes.
+ *
+ * <p><b>NOTE:</b> to read into a Java array use the alternative
+ * routine below.
+ */
+ public static native boolean SDreadattr(long id, int index, byte[] data) throws HDFException;
+
+ /**
+ * @param id <b>IN</b>: id of a file, SDS, or dimension
+ * @param index <b>IN</b>: index of the attribute
+ * @param theData <b>OUT</b>: Object, a Java array of appropriate
+ * type, dimensions, and size.
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * @return the data in the Java array.
+ *
+ * <p><b>Note:</b> reads the data as bytes and converts to
+ * the Java array.
+ */
+ public static boolean SDreadattr(long id, int index, Object theData) throws HDFException
+ {
+ byte[] data;
+ boolean rval;
+
+ Class<?> theClass = theData.getClass();
+ String name = theClass.getName();
+ if (name.equals("java.lang.String")) {
+ data = ((String)theData).getBytes();
+ rval = SDreadattr(id, index, data);
+ theData = new String(data);
+ }
+ else {
+ HDFArray theArray = new HDFArray(theData);
+ data = theArray.emptyBytes();
+ rval = SDreadattr(id, index, data);
+ theData = theArray.arrayify(data);
+ }
+ return rval;
+ }
+
+ public static native long SDfindattr(long id, String name) throws HDFException;
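
A sketch combining SDfindattr, SDattrinfo and SDreadattr to read an attribute by name; sds_id and the attribute name are illustrative, and treating the bytes as a character attribute at the end is an assumption.

    int attr_index = (int) HDFLibrary.SDfindattr(sds_id, "units");
    String[] attr_name = new String[1];
    int[] attr_info = new int[2];                  // data_type, count
    HDFLibrary.SDattrinfo(sds_id, attr_index, attr_name, attr_info);
    byte[] buf = new byte[attr_info[1] * HDFConstants.getTypeSize(attr_info[0])];
    HDFLibrary.SDreadattr(sds_id, attr_index, buf);
    String value = new String(buf);                // assumes a DFNT_CHAR8 attribute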
+
+ public static native boolean SDiscoordvar(long sdsid) throws HDFException;
+
+ /**
+ * @param sdsid <b>IN</b>: id of the SDS as returned by SDselect
+ * @param argv <b>OUT</b>: double[4], calibration information:
+ * calibration factor
+ * calibration error
+ * offset
+ * offset error
+ * @param NT <b>OUT</b>: int[1], number type of uncalibrated data
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * @return argv[0] = cal, argv[1] = cal_err,
+ * argv[2] = offset, argv[3] = offset_err,
+ * NT[0] = data_type
+ */
+ public static native boolean SDgetcal(long sdsid, double[] argv, int[] NT) throws HDFException;
+
+ /**
+ * @param sdsid <b>IN</b>: id of the SDS as returned by SDselect
+ * @param strings <b>OUT</b>: String[4], data information strings:
+ * label
+ * unit
+ * print format
+ * coordinate system
+ * @param len <b>IN</b>: int, max len of string (not needed by
+ * Java -- the HDFLibrary interface will handle this)
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * @return strings[0] = label, strings[1] = unit,
+ * strings[2] = format, strings[3] = coordsys
+ */
+ public static native boolean SDgetdatastrs(long sdsid, String[] strings, int len) throws HDFException;
+
+ /**
+ * @param dimid <b>IN</b>: id of the dimension as returned by SDgetdimid
+ * @param args <b>OUT</b>: String[3], data information strings:
+ * label
+ * unit
+ * print format
+ * @param len <b>IN</b>: int, max len of string (not needed by
+ * Java -- the HDFLibrary interface will handle this)
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * @return args[0] = label, args[1] = unit, args[2] = format
+ */
+ public static native boolean SDgetdimstrs(long dimid, String[] args, int len) throws HDFException;
+
+ /**
+ * @param dimid <b>IN</b>: id of a dimension as returned by SDgetdimid
+ * @param data <b>OUT</b>: byte[], data
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * @return the data in the form of a contiguous array of bytes.
+ *
+ * <p><b>NOTE:</b> to read into a Java array use the alternative
+ * routine below.
+ */
+ public static native boolean SDgetdimscale(long dimid, byte[] data) throws HDFException;
+
+ /**
+ * @param dimid <b>IN</b>: id of a dimension as returned by SDgetdimid
+ * @param theData <b>OUT</b>: Object, a Java array of appropriate
+ * type and size.
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * @return the data in the Java array.
+ *
+ * <p><b>Note:</b> reads the data as bytes and converts to
+ * the Java array.
+ */
+ public static boolean SDgetdimscale(long dimid, Object theData) throws HDFException
+ {
+ byte[] data;
+ boolean rval;
+
+ HDFArray theArray = new HDFArray(theData);
+ data = theArray.emptyBytes();
+ rval = SDgetdimscale(dimid, data);
+ theData = theArray.arrayify(data);
+ return rval;
+ }
+
+ /**
+ * @param sdsid <b>IN</b>: id of the SDS as returned by SDselect
+ * @param fillValue <b>OUT</b>: byte[], data
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * @return the fill value in the form of a contiguous array of bytes.
+ *
+ * <p><b>NOTE:</b> to read into a Java variable use the alternative
+ * routine below.
+ */
+ public static native boolean SDgetfillvalue(long sdsid, byte[] fillValue) throws HDFException;
+
+ /**
+ * @param sdsid <b>IN</b>: id of the SDS as returned by SDselect
+ * @param theFillValue <b>OUT</b>: Object[1], one object of
+ * appropriate type
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * @return the data in the Java array: theFillValue[0] = fillValue
+ *
+ * <p><b>Note:</b> the routine calls SDgetinfo to determine the
+ * correct type, reads the data as bytes, and converts to the
+ * appropriate Java object.
+ */
+ public static boolean SDgetfillvalue(long sdsid, Object[] theFillValue) throws HDFException
+ {
+ int[] SDInfo = new int[3];
+ int NT;
+ String datasetname = new String(" ");
+ String ss[] = new String[1];
+ ss[0] = datasetname;
+ int dimsize[] = new int[16];
+ SDgetinfo(sdsid, ss, dimsize, SDInfo);
+ datasetname = ss[0];
+ byte[] d1 = new byte[8];
+ boolean rval;
+ rval = SDgetfillvalue(sdsid, d1);
+ if (rval == false) {
+ return(rval);
+ }
+ NT = SDInfo[1];
+ if ((NT & HDFConstants.DFNT_LITEND) != 0) {
+ NT -= HDFConstants.DFNT_LITEND;
+ }
+ if ((NT == HDFConstants.DFNT_INT8) || (NT == HDFConstants.DFNT_CHAR8) || (NT == HDFConstants.DFNT_CHAR)) {
+ theFillValue[0] = new Byte(d1[0]);
+ }
+ else if ((NT == HDFConstants.DFNT_UINT8) || (NT == HDFConstants.DFNT_UCHAR8)) {
+ Byte f = new Byte(d1[0]);
+ if (f.shortValue() < 0) {
+ theFillValue[0] = new Short((short)(f.intValue() + 256));
+ }
+ else {
+ theFillValue[0] = new Short(f.shortValue());
+ }
+ }
+ else if ((NT == HDFConstants.DFNT_INT16) || (NT == HDFConstants.DFNT_CHAR16)) {
+ short [] fx = HDFNativeData.byteToShort(0,1,d1);
+ theFillValue[0] = new Short(fx[0]);
+ }
+ else if ((NT == HDFConstants.DFNT_UINT16) || (NT == HDFConstants.DFNT_UCHAR16)) {
+ short[] fmx = HDFNativeData.byteToShort(0,1,d1);
+ Short f = new Short(fmx[0]);
+ if (f.intValue() < 0) {
+ theFillValue[0] = new Integer(f.intValue() + 65536);
+ }
+ else {
+ theFillValue[0] = new Integer(f.intValue());
+ }
+ }
+ else if ((NT == HDFConstants.DFNT_INT32)) {
+ int [] fx = HDFNativeData.byteToInt(0,1,d1);
+ theFillValue[0] = new Integer(fx[0]);
+ }
+ else if ((NT == HDFConstants.DFNT_UINT32)) {
+ int[] fmx = HDFNativeData.byteToInt(0,1,d1);
+ Integer i = new Integer(fmx[0]);
+ if (i.floatValue() < 0) {
+ theFillValue[0] = new Float((float)(i.floatValue() + 4294967296.0));
+ }
+ else {
+ theFillValue[0] = new Float(i.floatValue());
+ }
+ }
+ else if (NT == HDFConstants.DFNT_FLOAT32) {
+ float [] fx = HDFNativeData.byteToFloat(0,1,d1);
+ theFillValue[0] = new Float(fx[0]);
+ }
+ else if (NT == HDFConstants.DFNT_FLOAT64) {
+ double [] fx = HDFNativeData.byteToDouble(0,1,d1);
+ theFillValue[0] = new Double(fx[0]);
+ }
+ else {
+ System.out.println("Error: SDgetfillvalue not converting, type "+NT);
+ }
+ return rval;
+ }
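+
+ // Editor's illustrative sketch (not part of the upstream API): the Object[1]
+ // out-parameter convention used by the wrapper above; the caller inspects the
+ // runtime type of the returned value. The helper name is hypothetical.
+ private static Number fillValueOf(long sdsid) throws HDFException
+ {
+ Object[] fill = new Object[1];
+ if (!SDgetfillvalue(sdsid, fill)) {
+ return null; // no fill value is defined for this SDS
+ }
+ return (Number) fill[0]; // Byte/Short/Integer/Float/Double, depending on the SDS type
+ }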
+
+ /**
+ * @param sdsid <b>IN</b>: id of the SDS as returned by SDselect
+ * @param max <b>OUT</b>: byte[], max value, as bytes
+ * @param min <b>OUT</b>: byte[], min value, as bytes
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * @return the max and min values in the form of a contiguous array of
+ * bytes.
+ *
+ * <p><b>NOTE:</b> to read into Java doubles, use the alternative
+ * routine below.
+ */
+ public static native boolean SDgetrange(long sdsid, byte[] max, byte[] min) throws HDFException;
+
+ /**
+ * @param sdsid <b>IN</b>: id of the SDS as returned by SDselect
+ * @param maxmin <b>OUT</b>: double[2], the max and min values
+ * converted to doubles
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * @return the data in the Java array: maxmin[0] = max,
+ * maxmin[1] = min
+ *
+ * <p><b>Note:</b> the routine calls SDgetinfo to determine the
+ * correct type, reads the data as bytes, and converts them to
+ * doubles.
+ */
+ public static boolean SDgetrange(long sdsid, double maxmin[]) throws HDFException
+ {
+ int[] SDInfo = new int[3];
+ int NT;
+ String datasetname = new String(" ");
+ String ss[] = new String[1];
+ ss[0] = datasetname;
+ int dimsize[] = new int[16];
+ SDgetinfo(sdsid, ss, dimsize, SDInfo);
+ datasetname = ss[0];
+ byte[] max = new byte[8];
+ byte[] min = new byte[8];
+ boolean rval;
+ rval = SDgetrange(sdsid, max, min);
+ if (rval == false) {
+ return (rval);
+ }
+ NT = SDInfo[1];
+ if ((NT & HDFConstants.DFNT_LITEND) != 0) {
+ NT -= HDFConstants.DFNT_LITEND;
+ }
+ if ((NT == HDFConstants.DFNT_INT8) || (NT == HDFConstants.DFNT_CHAR8) || (NT == HDFConstants.DFNT_CHAR)) {
+ Byte f = new Byte(max[0]);
+ maxmin[0] = (f.doubleValue());
+ f = new Byte(min[0]);
+ maxmin[1] = (f.doubleValue());
+ }
+ else if ((NT == HDFConstants.DFNT_UINT8) || (NT == HDFConstants.DFNT_UCHAR8)) {
+ Byte f = new Byte(max[0]);
+ Short fmx;
+ if (f.shortValue() < 0) {
+ fmx = new Short((short) (f.intValue() + 256));
+ }
+ else {
+ fmx = new Short(f.shortValue());
+ }
+ maxmin[0] = (fmx.doubleValue());
+ f = new Byte(min[0]);
+ if (f.shortValue() < 0) {
+ fmx = new Short((short) (f.intValue() + 256));
+ }
+ else {
+ fmx = new Short(f.shortValue());
+ }
+ maxmin[1] = (fmx.doubleValue());
+ }
+ else if ((NT == HDFConstants.DFNT_INT16) || (NT == HDFConstants.DFNT_CHAR16)) {
+ short[] fmx = HDFNativeData.byteToShort(0, 1, max);
+ short[] fmn = HDFNativeData.byteToShort(0, 1, min);
+ Short f = new Short(fmx[0]);
+ maxmin[0] = (f.doubleValue());
+ f = new Short(fmn[0]);
+ maxmin[1] = (f.doubleValue());
+ }
+ else if ((NT == HDFConstants.DFNT_UINT16) || (NT == HDFConstants.DFNT_UCHAR16)) {
+ short[] fmx = HDFNativeData.byteToShort(0, 1, max);
+ Short f = new Short(fmx[0]);
+ Integer i;
+ if (f.intValue() < 0) {
+ i = new Integer(f.intValue() + 65536);
+ }
+ else {
+ i = new Integer(f.intValue());
+ }
+ maxmin[0] = (i.doubleValue());
+ fmx = HDFNativeData.byteToShort(0, 1, min);
+ f = new Short(fmx[0]);
+ if (f.intValue() < 0) {
+ i = new Integer(f.intValue() + 65536);
+ }
+ else {
+ i = new Integer(f.intValue());
+ }
+ maxmin[1] = (i.doubleValue());
+ }
+ else if ((NT == HDFConstants.DFNT_INT32)) {
+ int[] fmx = HDFNativeData.byteToInt(0, 1, max);
+ int[] fmn = HDFNativeData.byteToInt(0, 1, min);
+ Integer f = new Integer(fmx[0]);
+ maxmin[0] = (f.doubleValue());
+ f = new Integer(fmn[0]);
+ maxmin[1] = (f.doubleValue());
+ }
+ else if ((NT == HDFConstants.DFNT_UINT32)) {
+ int[] fmx = HDFNativeData.byteToInt(0, 1, max);
+ Integer i = new Integer(fmx[0]);
+ Float f;
+ if (i.floatValue() < 0) {
+ f = new Float((float) (i.floatValue() + 4294967296.0));
+ }
+ else {
+ f = new Float(i.floatValue());
+ }
+ maxmin[0] = (f.doubleValue());
+ fmx = HDFNativeData.byteToInt(0, 1, min);
+ i = new Integer(fmx[0]);
+ if (i.floatValue() < 0) {
+ f = new Float((float) (i.floatValue() + 4294967296.0));
+ }
+ else {
+ f = new Float(i.floatValue());
+ }
+ maxmin[1] = (f.doubleValue());
+ }
+ else if (NT == HDFConstants.DFNT_FLOAT32) {
+ float[] fmx = HDFNativeData.byteToFloat(0, 1, max);
+ float[] fmn = HDFNativeData.byteToFloat(0, 1, min);
+ Float f = new Float(fmx[0]);
+ maxmin[0] = (f.doubleValue());
+ f = new Float(fmn[0]);
+ maxmin[1] = (f.doubleValue());
+ }
+ else if (NT == HDFConstants.DFNT_FLOAT64) {
+ double[] fmx = HDFNativeData.byteToDouble(0, 1, max);
+ double[] fmn = HDFNativeData.byteToDouble(0, 1, min);
+ Double f = new Double(fmx[0]);
+ maxmin[0] = (f.doubleValue());
+ f = new Double(fmn[0]);
+ maxmin[1] = (f.doubleValue());
+
+ }
+ else {
+ System.out.println("Error: SDgetrange not converting, type " + NT);
+ }
+ return rval;
+ }
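+
+ // Editor's illustrative sketch (not part of the upstream API): typical use of the
+ // double[2] overload above. The helper name is hypothetical.
+ private static boolean printRange(long sdsid) throws HDFException
+ {
+ double[] maxmin = new double[2];
+ boolean ok = SDgetrange(sdsid, maxmin); // maxmin[0] = max, maxmin[1] = min
+ if (ok) {
+ System.out.println("max=" + maxmin[0] + " min=" + maxmin[1]);
+ }
+ return ok;
+ }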
+
+ public static native long SDcreate(long sd_id, String name, long number_type, int rank, int[] dimsizes) throws HDFException;
+
+ public static native boolean SDisrecord(long sdsid) throws HDFException;
+
+ public static native boolean SDsetattr(long s_id, String attr_name, long num_type, int count,
+ byte[] values) throws HDFException;
+
+ public static boolean SDsetattr(long s_id, String attr_name, long num_type, int count, Object theValues) throws HDFException
+ {
+ byte[] data;
+ HDFArray theArray = new HDFArray(theValues);
+ data = theArray.byteify();
+ return SDsetattr(s_id, attr_name, num_type, count, data);
+ }
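+
+ // Editor's illustrative sketch (not part of the upstream API): writes a one-element
+ // DFNT_INT32 attribute through the Object overload above. The helper name and the
+ // attribute name argument are hypothetical.
+ private static boolean setIntAttr(long s_id, String attrName, int value) throws HDFException
+ {
+ int[] values = { value };
+ return SDsetattr(s_id, attrName, HDFConstants.DFNT_INT32, values.length, values);
+ }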
+
+ public static native boolean SDsetcal(long sds_id, double cal, double cal_err,
+ double offset, double offset_err, int number_type) throws HDFException;
+
+ public static native boolean SDsetdatastrs(long sds_id, String label, String unit, String format,
+ String coordsys) throws HDFException;
+
+ public static native boolean SDsetdimname(long dim_id, String dim_name) throws HDFException;
+
+ /**
+ * @param dim_id <b>IN</b>: id of a dimension
+ * @param count <b>IN</b>: number of values
+ * @param number_type <b>IN</b>: number type of the values
+ * @param data <b>IN</b>: byte[], the values, in an array of
+ * bytes.
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * <p><b>Note:</b> the calling program must assure that the
+ * data is correctly formatted for C. To write an array
+ * of Java objects, use the alternative routine below.
+ *
+ * @return true on success
+ */
+ public static native boolean SDsetdimscale(long dim_id, int count, int number_type, byte[] data) throws HDFException;
+
+ /**
+ * @param dim_id <b>IN</b>: id of a dimension
+ * @param count <b>IN</b>: number of values
+ * @param number_type <b>IN</b>: number type of the values
+ * @param theData <b>OUT</b>: Object, a Java array of appropriate
+ * type and size.
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * <p><b>Note:</b> converts the Java array to an array of
+ * bytes, and writes the bytes.
+ *
+ * @return true on success
+ */
+ public static boolean SDsetdimscale(long dim_id, int count, int number_type, Object theData) throws HDFException
+ {
+ byte[] data;
+ HDFArray theArray = new HDFArray(theData);
+ data = theArray.byteify();
+ return SDsetdimscale(dim_id, count, number_type, data);
+ }
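+
+ // Editor's illustrative sketch (not part of the upstream API): attaches a float
+ // scale to a dimension through the Object overload above; dim_id is assumed to
+ // come from SDgetdimid. The helper name is hypothetical.
+ private static boolean setFloatScale(long dim_id, float[] scale) throws HDFException
+ {
+ return SDsetdimscale(dim_id, scale.length, HDFConstants.DFNT_FLOAT32, scale);
+ }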
+
+ public static native boolean SDsetdimstrs(long dim_id, String label, String unit, String format) throws HDFException;
+
+ public static native boolean SDsetexternalfile(long sds_id, String filename, int offset) throws HDFException;
+
+ /**
+ * @param sds_id <b>IN</b>: id of a dataset
+ * @param fill_val <b>IN</b>: byte[], the fill values in an array of
+ * bytes.
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * <p><b>Note:</b> the calling program must assure that the
+ * data is correctly formatted for C. To set the fill value
+ * with a Java object, use the alternative routine below.
+ *
+ * @return true on success
+ */
+ public static native boolean SDsetfillvalue(long sds_id, byte[] fill_val) throws HDFException;
+
+ /**
+ * @param sds_id <b>IN</b>: id of a dataset
+ * @param the_fill_val <b>IN</b>: Object, a Java object of appropriate
+ * type
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * <p><b>Note:</b> converts the Java array to an array of
+ * bytes, and writes the bytes.
+ *
+ * @return true on success
+ */
+ public static boolean SDsetfillvalue(long sds_id, Object the_fill_val) throws HDFException
+ {
+ byte[] data;
+ HDFArray theArray = new HDFArray(the_fill_val);
+ data = theArray.byteify();
+ return SDsetfillvalue(sds_id, data);
+ }
+
+ /**
+ * @param sdsid <b>IN</b>: id of a dataset
+ * @param max <b>IN</b>: byte[], the max value in an array of bytes
+ * @param min <b>IN</b>: byte[], the min value in an array of bytes
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * <p><b>Note:</b> the calling program must assure that the
+ * data is correctly formatted for C. To set the max and min value
+ * with Java objects, use the alternative routine below.
+ *
+ * @return true on success
+ */
+ public static native boolean SDsetrange(long sdsid, byte[] max, byte[] min) throws HDFException;
+
+ /**
+ * @param sdsid <b>IN</b>: id of a dataset
+ * @param max <b>IN</b>: Object, a Java object of appropriate type
+ * @param min <b>IN</b>: Object, a Java object of appropriate type
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * <p><b>Note:</b> converts the Java array to an array of
+ * bytes, and writes the bytes.
+ *
+ * @return true on success
+ */
+ public static boolean SDsetrange(long sdsid, Object max, Object min) throws HDFException
+ {
+ byte[] d1;
+ byte[] d2;
+ HDFArray theArray1 = new HDFArray(max);
+ d1 = theArray1.byteify();
+ HDFArray theArray2 = new HDFArray(min);
+ d2 = theArray2.byteify();
+ return SDsetrange(sdsid, d1, d2);
+ }
+
+
+ /**
+ * @param sdsid <b>IN</b>: the SD interface id, returned by SDselect
+ * @param start <b>IN</b>: int[], start
+ * @param stride <b>IN</b>: int[], stride
+ * @param count <b>IN</b>: int[], count
+ * @param data <b>IN</b>: byte[], data in an array of bytes
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * <p><b>NOTE:</b> to write from a Java array use the alternative
+ * routine below.
+ *
+ * @return true on success
+ */
+ public static native boolean SDwritedata(long sdsid, int[] start, int[] stride,
+ int[] count, byte[] data) throws HDFException;
+
+ /**
+ * @param sdsid <b>IN</b>: the SD interface id, returned by SDselect
+ * @param start <b>IN</b>: int[], start
+ * @param stride <b>IN</b>: int[], stride
+ * @param count <b>IN</b>: int[], count
+ * @param theData <b>IN</b>: Object, a Java array of appropriate
+ * type, dimensions, and size.
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * <p><b>Note:</b> converts the Java array to a contiguous
+ * array of bytes and then writes it to the file.
+ *
+ * @return true on success
+ */
+ public static boolean SDwritedata(long sdsid, int[] start, int[] stride,
+ int[] count, Object theData ) throws HDFException
+ {
+ byte[] data;
+
+ HDFArray theArray = new HDFArray(theData);
+ data = theArray.byteify();
+ return SDwritedata(sdsid, start, stride, count, data);
+ }
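+
+ // Editor's illustrative sketch (not part of the upstream API): creates a small 2-D
+ // DFNT_FLOAT32 dataset and writes a Java float[][] through the Object overload
+ // above. "sd_id" is assumed to come from the SD interface open call (SDstart),
+ // declared elsewhere in this class; the helper and dataset names are hypothetical.
+ private static void writeFloatGrid(long sd_id, String name, float[][] grid) throws HDFException
+ {
+ int[] dims = { grid.length, grid[0].length };
+ long sdsid = SDcreate(sd_id, name, HDFConstants.DFNT_FLOAT32, 2, dims);
+ int[] start = { 0, 0 };
+ int[] stride = { 1, 1 }; // contiguous write
+ SDwritedata(sdsid, start, stride, dims, grid);
+ SDendaccess(sdsid);
+ }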
+
+ public static native boolean SDsetnbitdataset(long id, int start_bit, int bit_len,
+ int sign_ext, int fill_one) throws HDFException;
+
+ /**
+ * @param id <b>IN</b>: the SD identifier returned by SDselect
+ * @param type <b>IN</b>: the type of compression
+ * @param cinfo <b>IN</b>: HDFCompInfo, the compression info
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * <p><b>NOTE:</b> The compression algorithm-specific information is
+ * passed in an appropriate sub-class of HDFCompInfo.
+ *
+ * @return true on success
+ */
+ public static native boolean SDsetcompress(long id, int type, HDFCompInfo cinfo) throws HDFException;
+
+ /**
+ * @deprecated As of HDF 4.2.9, replaced by {@link #SDgetcompinfo(long, HDFCompInfo)}
+ * @param id <b>IN</b>: the SD identifier returned by SDselect
+ * @param cinfo <b>IN</b>: HDFCompInfo, the compression info
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * <p><b>NOTE:</b> The compression algorithm-specific information is
+ * passed in an appropriate sub-class of HDFCompInfo.
+ *
+ * @return true on success
+ */
+
+ public static native boolean SDgetcompinfo(long id, HDFCompInfo cinfo) throws HDFException;
+
+ public static native boolean SDsetaccesstype(long id, int accesstype) throws HDFException;
+
+ public static native boolean SDsetblocksize(long sdsid, int block_size) throws HDFException;
+
+ /**
+ * @param sdsid <b>IN</b>: the SD id
+ * @param fill_enable <b>IN</b>: boolean, true calls library with
+ * SD_FILL, false calls library with SD_NOFILL
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * @return true on success
+ */
+ public static boolean SDsetfillmode(long sdsid, boolean fill_enable) throws HDFException
+ {
+ int fm;
+
+ if (fill_enable) {
+ fm = HDFConstants.SD_FILL;
+ }
+ else {
+ fm = HDFConstants.SD_NOFILL;
+ }
+ return SDsetfillmode(sdsid, fm);
+ }
+
+ public static native boolean SDsetfillmode(long sdsid, int fillmode) throws HDFException;
+
+ public static native boolean SDsetdimval_comp(long dimid, int comp_mode) throws HDFException;
+
+ public static native boolean SDisdimval_bwcomp(long dimid) throws HDFException;
+
+ /**
+ * @param sdsid <b>IN</b>: the SD identifier returned by SDselect
+ * @param chunk_def <b>IN</b>: HDFChunkInfo, the chunking info
+ * @param flags <b>IN</b>: the type of chunking
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * <p><b>NOTE:</b> The chunking algorithm-specific information is
+ * passed in an appropriate sub-class of HDFChunkInfo.
+ *
+ * @return true on success
+ */
+ public static native boolean SDsetchunk(long sdsid, HDFChunkInfo chunk_def, int flags) throws HDFException;
+
+ /**
+ * @param sdsid <b>IN</b>: the SD identifier returned by SDselect
+ * @param chunk_def <b>OUT</b>: HDFChunkInfo, the chunking info
+ * @param clflags <b>OUT</b>: int[1], the type of chunking
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * @return c_info contains information about the chunking method,
+ * clflags[0] == the chunking flags
+ *
+ * <p><b>NOTE:</b> The chunking algorithm-specific information is
+ * passed in an appropriate sub-class of HDFChunkInfo.
+ */
+ public static native boolean SDgetchunkinfo(long sdsid, HDFChunkInfo chunk_def,
+ int[] clflags) throws HDFException;
+
+ /**
+ * @param sdsid <b>IN</b>: the SD interface id, returned by SDselect
+ * @param origin <b>IN</b>: int[], origin
+ * @param theData <b>OUT</b>: byte[], the data in an array of bytes
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * <p><b>NOTE:</b> to read into a Java array use the alternative
+ * routine below.
+ *
+ * @return true on success
+ */
+ public static native boolean SDreadchunk(long sdsid, int[] origin, byte[] theData) throws HDFException;
+
+ /**
+ * @param sdsid <b>IN</b>: the SD interface id, returned by SDselect
+ * @param origin <b>IN</b>: int[], origin
+ * @param theData <b>IN</b>: Object, a Java array of appropriate
+ * type, dimensions, and size.
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * <p><b>Note:</b> reads the data as a contiguous
+ * array of bytes and then converts it to an appropriate Java object.
+ *
+ * @return true on success
+ */
+ public static boolean SDreadchunk(long sdsid, int[] origin, Object theData) throws HDFException
+ {
+ byte[] data;
+ boolean rval;
+
+ HDFArray theArray = new HDFArray(theData);
+ data = theArray.emptyBytes();
+ rval = SDreadchunk(sdsid, origin, data);
+ theData = theArray.arrayify(data);
+ return rval;
+ }
+
+ public static native int SDsetchunkcache(long sdsid, int maxcache, int flags) throws HDFException;
+
+ /**
+ * @param sdsid <b>IN</b>: the SD interface id, returned by SDselect
+ * @param origin <b>IN</b>: int[], origin
+ * @param data <b>IN</b>: byte[], data to be written, in an array of bytes
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * <p><b>NOTE:</b> to write from a Java array use the alternative
+ * routine below.
+ *
+ * @return true on success
+ */
+ public static native boolean SDwritechunk(long sdsid, int[] origin, byte[] data) throws HDFException;
+
+ /**
+ * @param sdsid <b>IN</b>: the SD interface id, returned by SDselect
+ * @param origin <b>IN</b>: int[], origin
+ * @param theData <b>IN</b>: Object, a Java array of appropriate
+ * type, dimensions, and size.
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * <p><b>Note:</b> converts the Java array to a contiguous
+ * array of bytes and then writes it to the file.
+ *
+ * @return true on success
+ */
+ public static boolean SDwritechunk(long sdsid, int[] origin, Object theData) throws HDFException
+ {
+ byte[] data;
+
+ HDFArray theArray = new HDFArray(theData);
+ data = theArray.byteify();
+ return SDwritechunk(sdsid, origin, data);
+ }
+
+ public static native int VFfieldesize(long vdata_id, int field_index) throws HDFException;
+
+ public static native int VFfieldisize(long vdata_id, int field_index) throws HDFException;
+
+ public static native String VFfieldname(long vdata_id, int field_index) throws HDFException;
+
+ public static native int VFfieldorder(long vdata_id, int field_index) throws HDFException;
+
+ public static native long VFfieldtype(long vdata_id, int field_index) throws HDFException;
+
+ public static native int VFnfields(long vkey) throws HDFException;
+
+ public static native int VHmakegroup(long file_id, int[] tag_array,
+ int[] ref_array, int n_objects, String vgroup_name,
+ String vgroup_class) throws HDFException;
+
+ /**
+ * @param file_id <b>IN</b>: the file identifier returned by Hopen
+ * @param fieldname <b>IN</b>: String, the name of the field to be filled
+ * @param buf <b>IN</b>: byte[], data to be written, in an array of bytes
+ * @param n_records <b>IN</b>: int, the number of records being written
+ * @param data_type <b>IN</b>: int, the number type of the data
+ * @param vdata_name <b>IN</b>: String, the name of the Vdata
+ * @param vdata_class <b>IN</b>: String, the class of the Vdata
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * <p><b>NOTE:</b> to write from a Java array use the alternative
+ * routine below.
+ *
+ * @return negative on failure
+ */
+ public static native int VHstoredata(long file_id, String fieldname,
+ byte[] buf, int n_records, int data_type, String vdata_name,
+ String vdata_class) throws HDFException;
+ /**
+ * @param file_id <b>IN</b>: the file identifier returned by Hopen
+ * @param fieldname <b>IN</b>: String, the name of the field to be filled
+ * @param thebuf <b>IN</b>: Object, data to be written, in a Java array
+ * of appropriate type and size
+ * @param n_records <b>IN</b>: int, the number of records being written
+ * @param data_type <b>IN</b>: int, the number type of the data
+ * @param vdata_name <b>IN</b>: String, the name of the Vdata
+ * @param vdata_class <b>IN</b>: String, the class of the Vdata
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * <p><b>Note:</b> converts the Java array to a contiguous
+ * array of bytes and then writes it to the file.
+ *
+ * @return negative on failure
+ */
+ public static int VHstoredata(long file_id, String fieldname,
+ Object thebuf, int n_records, int data_type, String vdata_name,
+ String vdata_class) throws HDFException
+ {
+ byte[] data;
+
+ HDFArray theArray = new HDFArray(thebuf);
+ data = theArray.byteify();
+ return VHstoredata(file_id, fieldname, data, n_records, data_type, vdata_name, vdata_class);
+ }
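+
+ // Editor's illustrative sketch (not part of the upstream API): stores a one-field
+ // Vdata of ints through the Object overload above; each int is one record. The
+ // field, vdata and class names are hypothetical.
+ private static int storeIntVdata(long file_id, int[] values) throws HDFException
+ {
+ return VHstoredata(file_id, "values", values, values.length,
+ HDFConstants.DFNT_INT32, "ExampleVdata", "ExampleClass");
+ }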
+
+ /**
+ * @param file_id <b>IN</b>: the file identifier returned by Hopen
+ * @param fieldname <b>IN</b>: String, the name of the field to be filled
+ * @param buf <b>IN</b>: byte[], data to be written, in an array of bytes
+ * @param n_records <b>IN</b>: int, the number of records being written
+ * @param data_type <b>IN</b>: int, the number type of the data
+ * @param vdata_name <b>IN</b>: String, the name of the Vdata
+ * @param vdata_class <b>IN</b>: String, the class of the Vdata
+ * @param order <b>IN</b>: int, the number of components per field
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * <p><b>NOTE:</b> to write from a Java array use the alternative
+ * routine below.
+ *
+ * @return negative on failure
+ */
+ public static native int VHstoredatam(long file_id, String fieldname, byte[] buf,
+ int n_records, int data_type, String vdata_name, String vdata_class,
+ int order) throws HDFException;
+
+ /**
+ * @param file_id <b>IN</b>: the file identifier returned by Hopen
+ * @param fieldname <b>IN</b>: String, the name of the field to be filled
+ * @param buf <b>IN</b>: Object, data to be written, in a Java array
+ * of appropriate type, dimension, and size
+ * @param n_records <b>IN</b>: int, the number of records being written
+ * @param data_type <b>IN</b>: int, the number type of the data
+ * @param vdata_name <b>IN</b>: String, the name of the Vdata
+ * @param vdata_class <b>IN</b>: String, the class of the Vdata
+ * @param order <b>IN</b>: int, the number of components per field
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors.
+ *
+ * <p><b>Note:</b> converts the Java array to a contiguous
+ * array of bytes and then writes it to the file.
+ *
+ * @return negative on failure
+ */
+ public static int VHstoredatam(long file_id, String fieldname, Object buf,
+ int n_records, int data_type, String vdata_name,
+ String vdata_class, int order) throws HDFException
+ {
+ byte[] data;
+
+ HDFArray theArray = new HDFArray(buf);
+ data = theArray.byteify();
+ return VHstoredatam(file_id, fieldname, data, n_records, data_type, vdata_name, vdata_class, order);
+ }
+
+ public static native int VQueryref(long vkey) throws HDFException;
+ public static native int VQuerytag(long vkey) throws HDFException;
+
+ public static native boolean Vstart(long fid) throws HDFException;
+
+ public static native int Vattach(long fid, int vgroup_ref, String access) throws HDFException;
+
+ public static native void Vdetach(long vgroup_id) throws HDFException;
+
+ public static native void Vend(long file_id) throws HDFException;
+
+ public static native int Vgetid(long file_id, int vgroup_ref) throws HDFException;
+
+ /**
+ * @param vgroup_id <b>IN</b>: the Vgroup id
+ * @param hdfclassname <b>OUT</b>: String[1], the HDF class of
+ * the vgroup.
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ */
+ public static native void Vgetclass(long vgroup_id, String[] hdfclassname) throws HDFException;
+
+ /**
+ * @param vgroup_id <b>IN</b>: the Vgroup id
+ * @param hdfname <b>OUT</b>: String[1], the name of
+ * the vgroup.
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ */
+ public static native void Vgetname(long vgroup_id, String[] hdfname) throws HDFException;
+
+ public static native boolean Visvg(long vgroup_id, int vgroup_ref) throws HDFException;
+
+ public static native boolean Visvs(long vgroup_id, int vdata_ref) throws HDFException;
+
+ /**
+ * @param vgroup_id <b>IN</b>: the Vgroup id
+ * @param tags <b>OUT</b>: int[arraysize], the tags
+ * @param refs <b>OUT</b>: int[arraysize], the refs
+ * @param arraysize <b>IN</b>: int, the number of tags/refs to
+ * return
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * @return tags[] = tags for objects 0 - n, refs[] = refs for
+ * objects 0 - n
+ */
+ public static native int Vgettagrefs(long vgroup_id, int[] tags, int[] refs,
+ int arraysize) throws HDFException;
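+
+ // Editor's illustrative sketch (not part of the upstream API): lists the tag/ref
+ // pairs of a vgroup by pairing Vntagrefs (declared below) with Vgettagrefs above.
+ // The helper name is hypothetical.
+ private static void printMembers(long vgroup_id) throws HDFException
+ {
+ int n = Vntagrefs(vgroup_id); // number of tag/ref pairs in the vgroup
+ if (n <= 0) {
+ return;
+ }
+ int[] tags = new int[n];
+ int[] refs = new int[n];
+ Vgettagrefs(vgroup_id, tags, refs, n);
+ for (int i = 0; i < n; i++) {
+ System.out.println("tag=" + tags[i] + " ref=" + refs[i]);
+ }
+ }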
+
+ /**
+ * @param vgroup_id <b>IN</b>: the Vgroup id
+ * @param index <b>IN</b>: the index of the object
+ * @param tagref <b>OUT</b>: int[2], tagref[0] = tag, tagref[1] = ref
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * @return tagref[0] = tag for object #index, tagref[1] = ref for
+ * object #index
+ */
+ public static native boolean Vgettagref(long vgroup_id, int index, int[] tagref) throws HDFException;
+
+ public static native int Vntagrefs(long vgroup_id) throws HDFException;
+
+ public static native boolean Vinqtagref(long vgroup_id, int tag, int ref) throws HDFException;
+
+ /**
+ * @param fid <b>IN</b>: the file identifier returned by Hopen
+ * @param ref_array <b>OUT</b>: int[], the refs for Vdata not part
+ * of Vgroups
+ * @param buffersize <b>IN</b>: the max size of the ref_array
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * @return ref_array[] = refs 0 - ...
+ */
+ public static native int Vlone(long fid, int[] ref_array, int buffersize) throws HDFException;
+
+ public static native int Vaddtagref(long vgroup_id, int tag, int ref) throws HDFException;
+
+ public static native int Vdeletetagref(long vgroup_id, int tag, int ref) throws HDFException;
+
+ public static native int Vfind(long file_id, String vgroup_name) throws HDFException;
+
+ public static native int Vfindclass(long file_id, String vgclassname) throws HDFException;
+
+ public static native int Vflocate(int key, String vgclassname) throws HDFException;
+
+ public static native int Vgetnext(int key, int ref) throws HDFException;
+
+ /**
+ * @param vgroup_id <b>IN</b>: the Vgroup id
+ * @param n_entries <b>OUT</b>: int[1], the number of objects in the Vgroup
+ * @param vgroup_name <b>OUT</b>: String[1], the name of the Vgroup
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * @return n_entries[0] = number of entries, vgroup_name[0] = the name
+ */
+ public static native boolean Vinquire(long vgroup_id, int[] n_entries, String[] vgroup_name) throws HDFException;
+
+ public static native int Vinsert(long vgroup_id, long v_id) throws HDFException;
+
+ public static native int Vnrefs(int key, int ref) throws HDFException;
+
+ public static native boolean Vsetclass(long vgroup_id, String vgclassname) throws HDFException;
+
+ public static native boolean Vsetname(long vgroup_id, String vgname) throws HDFException;
+
+ /**
+ * @param id <b>IN</b>: Vgroup identifier returned by Vattach
+ * @param index <b>IN</b>: the index of the attribute
+ * @param name <b>OUT</b>: String[1], the name of the attribute
+ * @param argv <b>OUT</b>: int[5],
+ * Data type of the target attribute,
+ * Number of values in the target attribute,
+ * Size, in bytes, of the values of the target attribute,
+ * Number of fields of the attribute,
+ * Reference number of the attribute vdata
+ *
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call, but is not yet implemented.
+ *
+ *
+ * @return name[0] = name, argv[0] = data_type, argv[1] = count,
+ * argv[2] = size, argv[3] = nfields, argv[4] = refnum
+ */
+ public static native boolean Vattrinfo(long id, int index, String[] name, int[] argv) throws HDFException;
+
+
+ public static native long Vfindattr(long id, String name) throws HDFException;
+
+ /**
+ * @param id <b>IN</b>: the Vdata id
+ * @param index <b>IN</b>: the index of the attribute
+ * @param data <b>OUT</b>: byte[], the data in an array of bytes
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * <p><b>NOTE:</b> to read into a Java array use the alternative
+ * routine below.
+ *
+ * @return data = the value of the attribute, in an array of bytes
+ */
+ public static native boolean Vgetattr(long id, int index, byte[] data) throws HDFException;
+
+ /**
+ * @param id <b>IN</b>: the Vdata id
+ * @param index <b>IN</b>: the index of the attribute
+ * @param theData <b>OUT</b>: Object, a Java array of appropriate
+ * type, dimensions, and size.
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * <p><b>Note:</b> reads the data as a contiguous
+ * array of bytes and then converts it to an appropriate Java object.
+ *
+ * @return data = the value of the attribute, in an array of Java
+ * objects
+ */
+ public static boolean Vgetattr(long id, int index, Object theData) throws HDFException
+ {
+ byte[] data;
+ boolean rval;
+
+ HDFArray theArray = new HDFArray(theData);
+ data = theArray.emptyBytes();
+ rval = Vgetattr(id, index, data);
+ theData = theArray.arrayify(data);
+ return rval;
+ }
+
+ public static native int Vgetversion(long id) throws HDFException;
+
+ public static native int Vnattrs(long id) throws HDFException;
+
+ public static native boolean Vsetattr(long id, String attr_name,
+ long data_type, int count, String values) throws HDFException;
+
+ /**
+ * @param id <b>IN</b>: the Vdata id
+ * @param attr_name <b>IN</b>: String, the name of the attribute
+ * @param data_type <b>IN</b>: int, the number_type of the attribute
+ * @param count <b>IN</b>: the number of values
+ * @param data <b>IN</b>: byte[], the data in an array of bytes
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * <p><b>NOTE:</b> to write from a Java array use the alternative
+ * routine below.
+ *
+ * @return true on success
+ */
+ public static native boolean Vsetattr(long id, String attr_name,
+ long data_type, int count, byte[] data) throws HDFException;
+
+ /**
+ * @param id <b>IN</b>: the Vdata id
+ * @param attr_name <b>IN</b>: String, the name of the attribute
+ * @param data_type <b>IN</b>: int, the number_type of the attribute
+ * @param count <b>IN</b>: the number of values
+ * @param theData <b>IN</b>: Object, a Java array of appropriate
+ * type, dimensions, and size.
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * <p><b>Note:</b> converts the data to a contiguous
+ * array of bytes and then writes it.
+ *
+ * @return true on success
+ */
+ public static boolean Vsetattr(long id, String attr_name,
+ long data_type, int count, Object theData) throws HDFException
+ {
+ byte[] data;
+ HDFArray theArray = new HDFArray(theData);
+ data = theArray.byteify();
+ return Vsetattr(id, attr_name, data_type, count, data);
+ }
+
+ /**
+ * @param vdata_id <b>IN</b>, vdata id as returned by VSattach
+ * @param n_records <b>OUT</b>, int[1], the number of records in the vdata
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * @return n_records[0] == the number of records
+ */
+ public static native boolean VSQuerycount(long vdata_id, int[] n_records) throws HDFException;
+
+ /**
+ * @param vdata_id <b>IN</b>, vdata id as returned by VSattach
+ * @param fields <b>OUT</b>, String[1], the names of the fields
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * @return fields[0] == a comma delimited string with the names
+ * of the fields.
+ */
+ public static native boolean VSQueryfields(long vdata_id, String[] fields) throws HDFException;
+
+ /**
+ * @param vdata_id <b>IN</b>, vdata id as returned by VSattach
+ * @param interlace <b>OUT</b>, int[1], the interlace mode,
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * @return interlace[0] == the interlace mode,
+ * HDFConstants.FULL_INTERLACE or HDFConstants.NO_INTERLACE
+ */
+ public static native boolean VSQueryinterlace(long vdata_id, int[] interlace) throws HDFException;
+
+ /**
+ * @param vdata_id <b>IN</b>, vdata id as returned by VSattach
+ * @param vdata_name <b>OUT</b>, String[1], the name of the vdata
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * @return vdata_name[0] == the name
+ */
+ public static native boolean VSQueryname(long vdata_id, String[] vdata_name) throws HDFException;
+
+ public static native int VSQueryref(long vdata_id) throws HDFException;
+
+ public static native int VSQuerytag(long vdata_id) throws HDFException;
+
+ /**
+ * @param vdata_id <b>IN</b>, vdata id as returned by VSattach
+ * @param vdata_size <b>OUT</b>, int[1], the size of the vdata
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * @return vdata_size[0] == Native size, in bytes, of a record in the vdata
+ */
+ public static native boolean VSQueryvsize(long vdata_id, int[] vdata_size) throws HDFException;
+
+ public static native int VSattach(long fid, int vdata_ref, String access) throws HDFException;
+
+ public static native void VSdetach(long vdata_id) throws HDFException;
+
+ public static native long VSgetid(long file_id, int vdata_ref) throws HDFException;
+
+ /**
+ * @param vdata_id <b>IN</b>, vdata id as returned by VSattach
+ * @param hdfclassname <b>OUT</b>, String[1], the class name of the vdata
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ */
+ public static native void VSgetclass(long vdata_id, String[] hdfclassname) throws HDFException;
+
+ /**
+ * @param vdata_id <b>IN</b>, vdata id as returned by VSattach
+ * @param hdfname <b>OUT</b>, String[1], the name of the vdata
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ */
+ public static native void VSgetname(long vdata_id, String[] hdfname) throws HDFException;
+
+ public static native int VSelts(long vdata_id) throws HDFException;
+
+ public static native boolean VSfdefine(long vdata_id, String fieldname,
+ int numbertype, int order) throws HDFException;
+
+ public static native boolean VSfexist(long vdata_id, String fields) throws HDFException;
+
+ public static native int VSfind(long file_id, String vdataname) throws HDFException;
+
+ public static native int VSsetblocksize(long vdata_id, int blocksize) throws HDFException;
+
+ public static native int VSsetnumblocks(long vdata_id, int numblocks) throws HDFException;
+
+ /**
+ * @param vdata_id <b>IN</b>, vdata id as returned by VSattach
+ * @param fieldname <b>OUT</b>, String[1], the names of the fields
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * @return fieldname[0] == a comma delimited string with the names
+ * of the fields.
+ */
+ public static native int VSgetfields(long vdata_id, String[] fieldname) throws HDFException;
+
+ public static native int VSgetinterlace(long vdata_id) throws HDFException;
+
+ /**
+ * @param vdata_id <b>IN</b>, vdata id as returned by VSattach
+ * @param iargs <b>OUT</b>, int[3], n_records, interlace, vdata_size
+ * @param sargs <b>OUT</b>, String[2], the names of the dataset and the fields
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * @return iargs[0] = n_records, iargs[1] = interlace, iargs[2] = vdata_size,
+ * sargs[0] = vdata_name, sargs[1] = comma delimited list of fields
+ *
+ * <p><b>NOTE:</b> the parameters for the Java interface are not in
+ * the same order as the C interface.
+ */
+ public static native boolean VSinquire(long vdata_id, int[] iargs, String[] sargs) throws HDFException;
+
+ /**
+ * @param vdata_id <b>IN</b>, vdata id as returned by VSattach
+ * @param iargs <b>OUT</b>, int[2], block_size, num_blocks
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * @return iargs[0] = blocksize, iargs[1] = num_blocks
+ *
+ * <p><b>NOTE:</b> the parameters for the Java interface are not in
+ * the same order as the C interface.
+ */
+ public static native boolean VSinquire(long vdata_id, int[] iargs ) throws HDFException;
+
+ /**
+ * @param fid <b>IN</b>, File identifier returned by Hopen
+ * @param ref_array <b>OUT</b>, int[?], the refs
+ * @param buffersize <b>IN</b>, int, the max number of refs to
+ * return.
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * @return ref_array contains refs. The Java API ignores the buffersize
+ * argument, returns as many as possible.
+ */
+ public static native int VSlone(long fid, int[] ref_array, int buffersize) throws HDFException;
+
+ /**
+ * @param vdata_id <b>IN</b>: the Vdata id
+ * @param databuf <b>OUT</b>: byte[], the data in an array of bytes
+ * @param nrecord <b>IN</b>: int, number of records
+ * @param interlace <b>IN</b>: int, interlace
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * <p><b>NOTE:</b> to read into a Java array use the alternative
+ * routine below.
+ *
+ * @return the number of elements read (0 or a +ve integer)
+ */
+ public static native int VSread(long vdata_id, byte[] databuf, int nrecord, int interlace) throws HDFException;
+
+ /**
+ * @param vdata_id <b>IN</b>: the Vdata id
+ * @param theData <b>OUT</b>: Object, a Java array of appropriate
+ * type, dimensions, and size.
+ * @param nrecord <b>IN</b>: int, number of records
+ * @param interlace <b>IN</b>: int, interlace
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * <p><b>Note:</b> reads the data as a contiguous
+ * array of bytes and then converts it to an appropriate Java object.
+ *
+ * @return the number of elements read (0 or a +ve integer)
+ */
+ public static int VSread(long vdata_id, Object theData, int nrecord, int interlace) throws HDFException
+ {
+ byte[] data;
+ int rval;
+
+ HDFArray theArray = new HDFArray(theData);
+ data = theArray.emptyBytes();
+ rval = VSread(vdata_id, data, nrecord, interlace);
+ theData = theArray.arrayify( data );
+ return rval;
+ }
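+
+ // Editor's illustrative sketch (not part of the upstream API): reads every record of
+ // a single int field through the Object overload above. It assumes the field holds
+ // one DFNT_INT32 component per record; the helper name is hypothetical.
+ private static int[] readIntField(long vdata_id, String fieldName) throws HDFException
+ {
+ VSsetfields(vdata_id, fieldName); // select the one field to read
+ int n = VSelts(vdata_id); // number of records in the vdata
+ int[] buf = new int[n];
+ VSread(vdata_id, buf, n, HDFConstants.FULL_INTERLACE);
+ return buf;
+ }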
+
+ public static native int VSseek(long vdata_id, int record) throws HDFException;
+
+ public static native boolean VSsetfields(long vdata_id, String fields) throws HDFException;
+
+ public static native boolean VSsetinterlace(long vdata_id, int interlace) throws HDFException;
+
+ public static native int VSsizeof(long vdata_id, String fields) throws HDFException;
+
+ public static native boolean VSappendable(int vkey, int block_size) throws HDFException;
+
+ public static native int VSfindclass(long file_id, String vgclass) throws HDFException;
+
+ public static native int VSgetversion(int vkey) throws HDFException;
+
+ public static native void VSsetclass(long vdata_id, String vdata_class) throws HDFException;
+
+ public static native boolean VSsetexternalfile(int vkey, String filename, int offset) throws HDFException;
+
+ public static native void VSsetname(long vdata_id, String vdata_name) throws HDFException;
+
+ /**
+ * @param vdata_id <b>IN</b>: the Vdata id
+ * @param databuf <b>IN</b>: byte[], the data in an array of bytes
+ * @param n_records <b>IN</b>: int, number of records
+ * @param interlace <b>IN</b>: int, interlace
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * <p><b>NOTE:</b> to write a Java array use the alternative
+ * routine below.
+ *
+ * @return the number of elements written (0 or a +ve integer)
+ */
+ public static native int VSwrite(long vdata_id, byte [] databuf, int n_records, int interlace) throws HDFException;
+
+ /**
+ * @param vdata_id <b>IN</b>: the Vdata id
+ * @param databuf <b>IN</b>: Object, a Java array of appropriate
+ * type, dimensions, and size.
+ * @param n_records <b>IN</b>: int, number of records
+ * @param interlace <b>IN</b>: int, interlace
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * <p><b>Important Note:</b> This interface only supports
+ * records which are all of the same numeric type, with
+ * no character fields. Heterogeneous fields can be
+ * written as bytes (see above), but the calling program
+ * must make sure the data is in proper order to
+ * write to the HDF library.
+ *
+ * <p><b>Note:</b> converts the data into a contiguous
+ * array of bytes and then writes it
+ *
+ * @return the number of elements written (0 or a +ve integer)
+ */
+ public static int VSwrite(long vdata_id, Object databuf, int n_records, int interlace) throws HDFException
+ {
+ byte[] data;
+
+ HDFArray theArray = new HDFArray(databuf);
+ data = theArray.byteify();
+ return VSwrite( vdata_id, data, n_records, interlace);
+ }
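+
+ // Editor's illustrative sketch (not part of the upstream API): writes float records
+ // through the Object overload above, honoring the "single numeric type" restriction
+ // noted in the javadoc. The vdata's field is assumed to be defined as DFNT_FLOAT32
+ // with order 1 elsewhere; the helper name is hypothetical.
+ private static int writeFloatRecords(long vdata_id, float[] records) throws HDFException
+ {
+ return VSwrite(vdata_id, records, records.length, HDFConstants.FULL_INTERLACE);
+ }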
+
+ /**
+ * @param id <b>IN</b>: the Vdata id
+ * @param index <b>IN</b>: int, the index of the vdata
+ * @param attr_index <b>IN</b>: int, the index of the attribute
+ * @param name <b>OUT</b>: String[1], the name of the attribute
+ * @param argv <b>OUT</b>: int[3],
+ * Data type of the target attribute,
+ * Number of values in the target attribute,
+ * Size, in bytes, of the values of the target attribute,
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * @return name[0] = name, argv[0] = data_type, argv[1] = count,
+ * argv[2] = size
+ */
+ public static native boolean VSattrinfo(long id, int index, int attr_index, String[] name, int[] argv) throws HDFException;
+
+ /**
+ * @param id <b>IN</b>: the Vdata id
+ * @param name <b>IN</b>: the name of the attribute
+ * @param findex <b>IN</b>: int[1], the index of the attribute
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * @return findex[0] = the index
+ */
+ public static native int VSfindex(long id, String name, int[] findex) throws HDFException;
+
+ public static native int VSfindattr(long id, int index, String name) throws HDFException;
+
+ public static native int VSfnattrs(long id, int fnattrs) throws HDFException;
+
+ /**
+ * @param id <b>IN</b>: the Vdata id
+ * @param index <b>IN</b>: the index of the vdata
+ * @param attr_index <b>IN</b>: the index of the attribute
+ * @param data <b>OUT</b>: byte[], the data in an array of bytes
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * <p><b>NOTE:</b> to read into a Java array use the alternative
+ * routine below.
+ *
+ * @return data = the value of the attribute, in an array of bytes
+ */
+ public static native boolean VSgetattr(long id, int index, int attr_index, byte[] data) throws HDFException;
+
+ /**
+ * @param id <b>IN</b>: the Vdata id
+ * @param index <b>IN</b>: the index of the vdata
+ * @param attr_index <b>IN</b>: the index of the attribute
+ * @param theData <b>OUT</b>: Object, a Java array of appropriate
+ * type, dimensions, and size.
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * <p><b>Note:</b> reads the data as a contiguous
+ * array of bytes and then converts it to an appropriate Java object.
+ *
+ * @return data = the value of the attribute, in an array of Java
+ * objects
+ */
+ public static boolean VSgetattr(long id, int index, int attr_index, Object theData) throws HDFException
+ {
+ byte[] data;
+ boolean rval;
+
+ HDFArray theArray = new HDFArray(theData);
+ data = theArray.emptyBytes();
+ rval = VSgetattr(id, index, attr_index, data);
+ theData = theArray.arrayify(data);
+ return rval;
+ }
+ public static native boolean VSisattr(long id) throws HDFException;
+
+ public static native int VSnattrs(long id) throws HDFException;
+
+ /**
+ * @param id <b>IN</b>: the Vdata id
+ * @param index <b>IN</b>: the index of the vdata
+ * @param attr_name <b>IN</b>: String, the name of the attribute
+ * @param data_type <b>IN</b>: int, the number_type of the attribute
+ * @param count <b>IN</b>: the number of values
+ * @param values <b>IN</b>: String, the data in an String
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * <p><b>NOTE:</b> to write from a Java array use the alternative
+ * routine below.
+ *
+ * @return true on success
+ */
+ public static native boolean VSsetattr(long id, int index, String attr_name,
+ long data_type, int count, String values) throws HDFException;
+
+ /**
+ * @param id <b>IN</b>: the Vdata id
+ * @param index <b>IN</b>: the index of the vdata
+ * @param attr_name <b>IN</b>: String, the name of the attribute
+ * @param data_type <b>IN</b>: int, the number_type of the attribute
+ * @param count <b>IN</b>: the number of values
+ * @param values <b>IN</b>: byte[], the data in an array of bytes
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * <p><b>NOTE:</b> to write from a Java array use the alternative
+ * routine below.
+ *
+ * @return true on success
+ */
+ public static native boolean VSsetattr(long id, int index, String attr_name,
+ long data_type, int count, byte[] values) throws HDFException;
+
+ /**
+ * @param id <b>IN</b>: the Vdata id
+ * @param index <b>IN</b>: the index of the vdata
+ * @param attr_name <b>IN</b>: String, the name of the attribute
+ * @param data_type <b>IN</b>: int, the number_type of the attribute
+ * @param count <b>IN</b>: the number of values
+ * @param theData <b>IN</b>: Object, a Java array of appropriate
+ * type, dimensions, and size.
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * <p><b>Note:</b> converts the data to a contiguous
+ * array of bytes and then writes it.
+ *
+ * @return true on success
+ */
+ public static boolean VSsetattr(long id, int index, String attr_name,
+ long data_type, int count, Object theData) throws HDFException
+ {
+ byte[] data;
+ HDFArray theArray = new HDFArray(theData);
+ data = theArray.byteify();
+ return VSsetattr(id, index, attr_name, data_type, count, data);
+ }
+
+ /**
+ * @param fileName <b>IN</b>: String, the file
+ * @param argv <b>OUT</b>: int[3], the width, height, and interlace mode
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * @return argv[0] = width, argv[1] = height, argv[2] = interlace
+ */
+ public static native boolean DF24getdims(String fileName, int[] argv) throws HDFException;
+
+ public static native boolean DF24reqil(int il) throws HDFException;
+
+ /**
+ * @param fileName <b>IN</b>: String, the file
+ * @param imagedata <b>OUT</b>: byte[], the image, in an array of
+ * bytes
+ * @param width <b>IN</b>: int, the width of the image
+ * @param height <b>IN</b>: int, the height of the image
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * <p><b>NOTE:</b> to read into a Java array use the alternative
+ * routine below.
+ *
+ * @return data = the image in an array of bytes
+ */
+ public static native boolean DF24getimage(String fileName, byte[] imagedata,
+ int width, int height) throws HDFException;
+
+ /**
+ * @param fileName <b>IN</b>: String, the file
+ * @param theImagedata <b>OUT</b>: Object, the image, in a java
+ * array of appropriate size and type
+ * @param width <b>IN</b>: int, the width of the image
+ * @param height <b>IN</b>: int, the height of the image
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * <p><b>Note:</b> reads the data as a contiguous
+ * array of bytes and then converts it to an appropriate Java object.
+ *
+ * @return data = theImagedata: the image, in a Java array of
+ * appropriate size and type
+ */
+ public static boolean DF24getimage(String fileName, Object theImagedata, int width, int height) throws HDFException
+ {
+ byte[] data;
+ boolean rval;
+
+ HDFArray theArray = new HDFArray(theImagedata);
+ data = theArray.emptyBytes();
+ rval = DF24getimage(fileName, data, width, height);
+ theImagedata = theArray.arrayify(data);
+ return rval;
+ }
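+
+ // Editor's illustrative sketch (not part of the upstream API): queries the image
+ // dimensions with DF24getdims and then reads the raw bytes with the byte[] form of
+ // DF24getimage; a typed Java array could be passed to the Object overload above
+ // instead. The helper name is hypothetical; 3 bytes per pixel are assumed.
+ private static byte[] read24BitImage(String fileName) throws HDFException
+ {
+ int[] argv = new int[3]; // argv[0] = width, argv[1] = height, argv[2] = interlace
+ DF24getdims(fileName, argv);
+ byte[] image = new byte[argv[0] * argv[1] * 3];
+ DF24getimage(fileName, image, argv[0], argv[1]);
+ return image;
+ }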
+
+ public static native short DF24lastref() throws HDFException;
+
+ public static native boolean DF24restart() throws HDFException;
+
+ public static native boolean DF24readref(String filename, int ref) throws HDFException;
+
+ public static native int DF24nimages(String fileName) throws HDFException;
+
+ /**
+ * @param filename <b>IN</b>: String, the file
+ * @param image <b>IN</b>: byte[], the image, in an array of
+ * bytes
+ * @param width <b>IN</b>: int, the width of the image
+ * @param height <b>IN</b>: int, the height of the image
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * <p><b>NOTE:</b> to write from a Java array use the alternative
+ * routine below.
+ *
+ * @return true on success
+ */
+ public static native boolean DF24addimage(String filename, byte[] image,
+ int width, int height) throws HDFException;
+
+ /**
+ * @param filename <b>IN</b>: String, the file
+ * @param theImage <b>IN</b>: Object, the image, in a java
+ * array of appropriate size and type
+ * @param width <b>IN</b>: int, the width of the image
+ * @param height <b>IN</b>: int, the height of the image
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * <p><b>Note:</b> converts the data into a contiguous
+ * array of bytes and then writes it to the file
+ *
+ * @return true on success
+ */
+ public static boolean DF24addimage(String filename, Object theImage, int width,
+ int height) throws HDFException
+ {
+ byte[] data;
+ HDFArray theArray = new HDFArray(theImage);
+ data = theArray.byteify();
+ return DF24addimage(filename, data, width, height);
+ }
+
+ /**
+ * @param filename <b>IN</b>: String, the file
+ * @param image <b>IN</b>: byte[], the image, in an array of
+ * bytes
+ * @param width <b>IN</b>: int, the width of the image
+ * @param height <b>IN</b>: int, the height of the image
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * <p><b>NOTE:</b> to write from a Java array use the alternative
+ * routine below.
+ *
+ * @return true on success
+ */
+ public static native boolean DF24putimage(String filename, byte[] image,
+ int width, int height) throws HDFException;
+
+ /**
+ * @param filename <b>IN</b>: String, the file
+ * @param theImage <b>IN</b>: Object, the image, in a java
+ * array of appropriate size and type
+ * @param width <b>IN</b>: int, the width of the image
+ * @param height <b>IN</b>: int, the height of the image
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * <p><b>Note:</b> converts the data into a contiguous
+ * array of bytes and then writes it to the file
+ *
+ * @return true on success
+ */
+ public static boolean DF24putimage(String filename, Object theImage, int width, int height) throws HDFException
+ {
+ byte[] data;
+ HDFArray theArray = new HDFArray(theImage);
+ data = theArray.byteify();
+ return DF24putimage(filename, data, width, height);
+ }
+
+ /**
+ * @param type <b>IN</b>: int, the type of compression
+ * @param cinfo <b>IN</b>: HDFCompInfo, the compression parameters
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ * @return true on success
+ */
+ public static native boolean DF24setcompress(int type, HDFCompInfo cinfo) throws HDFException;
+
+ public static native boolean DF24setdims(int width, int height) throws HDFException;
+
+ public static native boolean DF24setil(int il) throws HDFException;
+
+ /**
+ * @param fileName <b>IN</b>: String, the file
+ * @param argv <b>OUT</b>: int[2], the width and height
+ * @param haspalette <b>OUT</b>: boolean[1], has a palette
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * @return argv[0] = width, argv[1] = height, haspalette[0] = true if the image has a palette
+ */
+ public static native boolean DFR8getdims(String fileName, int[] argv, boolean[] haspalette) throws HDFException;
+
+ /**
+ * @param fileName <b>IN</b>: String, the file
+ * @param imagedata <b>OUT</b>: byte[], the image, in an array of
+ * bytes
+ * @param width <b>IN</b>: int, the width of the image
+ * @param height <b>IN</b>: int, the height of the image
+ * @param palette <b>OUT</b>: byte[], the color look up table
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * <p><b>NOTE:</b> to read into a Java array use the alternative
+ * routine below.
+ *
+ * @return data = imagedata: the image in an array of bytes,
+ * palette: the look up table, in an array of bytes
+ */
+ public static native boolean DFR8getimage(String fileName, byte[] imagedata,
+ int width, int height, byte[] palette) throws HDFException;
+
+ /**
+ * @param fileName <b>IN</b>: String, the file
+ * @param theImagedata <b>OUT</b>: Object, the image, in a java
+ * array of appropriate size and type
+ * @param width <b>IN</b>: int, the width of the image
+ * @param height <b>IN</b>: int, the height of the image
+ * @param palette <b>OUT</b>: byte[], the color look up table
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * <p><b>Note:</b> reads the data as a contiguous
+ * array of bytes and then converts it to an appropriate Java object.
+ *
+ * @return data = theImagedata: the image data,
+ * in an array of Java objects
+ * palette: the look up table, in an array of bytes
+ */
+ public static boolean DFR8getimage(String fileName, Object theImagedata, int width,
+ int height, byte[] palette) throws HDFException
+ {
+ byte[] data;
+ boolean rval;
+
+ HDFArray theArray = new HDFArray(theImagedata);
+ data = theArray.emptyBytes();
+ rval = DFR8getimage(fileName, data, width, height, palette);
+ theImagedata = theArray.arrayify(data);
+ return rval;
+ }
+
+ public static native short DFR8lastref() throws HDFException;
+
+ public static native boolean DFR8restart() throws HDFException;
+
+ public static native boolean DFR8readref(String filename, int ref) throws HDFException;
+
+ public static native int DFR8nimages(String fileName) throws HDFException;
+
+ /**
+ * @param filename <b>IN</b>: String, the file
+ * @param image <b>IN</b>: byte[], the image, in an array of
+ * bytes
+ * @param width <b>IN</b>: int, the width of the image
+ * @param height <b>IN</b>: int, the height of the image
+ * @param compress <b>IN</b>: short, the type of compression
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * <p><b>NOTE:</b> to write from a Java array use the alternative
+ * routine below.
+ *
+ * @return true on success
+ */
+ public static native boolean DFR8addimage(String filename, byte[] image,
+ int width, int height, short compress) throws HDFException;
+
+ /**
+ * @param filename <b>IN</b>: String, the file
+ * @param theImage <b>IN</b>: Object, the image, in a java
+ * array of appropriate size and type
+ * @param width <b>IN</b>: int, the width of the image
+ * @param height <b>IN</b>: int, the height of the image
+ * @param compress <b>IN</b>: short, the type of compression
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * <p><b>Note:</b> converts the data into a contiguous
+ * array of bytes and then writes it to the file
+ *
+ * @return true on success
+ */
+ public static boolean DFR8addimage(String filename, Object theImage, int width, int height,
+ short compress) throws HDFException
+ {
+ byte[] data;
+ HDFArray theArray = new HDFArray(theImage);
+ data = theArray.byteify();
+ return DFR8addimage(filename, data, width, height, compress);
+ }
+
+ /**
+ * @param filename <b>IN</b>: String, the file
+ * @param image <b>IN</b>: byte[], the image, in an array of
+ * bytes
+ * @param width <b>IN</b>: int, the width of the image
+ * @param height <b>IN</b>: int, the height of the image
+ * @param compress <b>IN</b>: short, the type of compression
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * <p><b>NOTE:</b> to write from a Java array use the alternative
+ * routine below.
+ *
+ * @return true on success
+ */
+ public static native boolean DFR8putimage(String filename, byte[] image,
+ int width, int height, short compress) throws HDFException;
+
+ /**
+ * @param filename <b>IN</b>: String, the file
+ * @param theImage <b>IN</b>: Object, the image, in a java
+ * array of appropriate size and type
+ * @param width <b>IN</b>: int, the width of the image
+ * @param height <b>IN</b>: int, the height of the image
+ * @param compress <b>IN</b>: short, the type of compression
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ *
+ * <p><b>Note:</b> converts the data into a contiguous
+ * array of bytes and then writes it to the file
+ *
+ * @return true on success
+ */
+ public static boolean DFR8putimage(String filename, Object theImage, int width, int height,
+ short compress) throws HDFException
+ {
+ byte[] data;
+ HDFArray theArray = new HDFArray(theImage);
+ data = theArray.byteify();
+ return DFR8putimage(filename, data, width, height, compress);
+ }
+
+ /**
+ * DFR8setcompress sets compression scheme for 8-bit image
+ *
+ * @param type <b>IN</b>: int, the type of compression
+ * @param cinfo <b>IN</b>: HDFCompInfo, the compression parameters
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ * @return true on success
+ */
+ public static native boolean DFR8setcompress(int type, HDFCompInfo cinfo) throws HDFException;
+
+ /**
+ * @param palref <b>OUT</b>: short[1], the HDF ref of the palette
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call.
+ *
+ * @return palref[0] = the ref of the palette
+ */
+ public static native boolean DFR8getpalref(short[] palref) throws HDFException;
+
+ public static native boolean DFR8setpalette(byte[] palette) throws HDFException;
+
+ public static native boolean DFR8writeref(String filename, short ref) throws HDFException;
+
+ /**
+ * New API for HDF 4.2r1
+ *
+ * @param coder_type comp_coder_t enum for determining which type of encoding is being done
+ *
+ * @exception hdf.hdflib.HDFException
+ * should be thrown for errors in the
+ * HDF library call, but is not yet implemented.
+ *
+ *
+ * @return the compression config info value
+ */
+ public static native int HCget_config_info(int coder_type) throws HDFException;
+
+}
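
The DF24/DFR8 wrappers above come in pairs: a native byte[] entry point plus an Object
overload that converts through HDFArray. A minimal sketch of the byte[] path, assuming a
hypothetical file name and image size and that the libhdf_java native library can be loaded:

    // A minimal sketch; "image24.hdf" and the 4x3 size are hypothetical.
    import hdf.hdflib.HDFException;
    import hdf.hdflib.HDFLibrary;

    public class DF24Sketch {
        public static void main(String[] args) throws HDFException {
            String file = "image24.hdf";
            int width = 4, height = 3;

            // A 24-bit raster is three bytes (RGB) per pixel, pixel-interlaced.
            byte[] pixels = new byte[width * height * 3];
            for (int i = 0; i < pixels.length; i++)
                pixels[i] = (byte) i;

            // Write the raster, then read it back into a fresh buffer.
            HDFLibrary.DF24addimage(file, pixels, width, height);
            byte[] back = new byte[width * height * 3];
            HDFLibrary.DF24getimage(file, back, width, height);
            System.out.println("read " + back.length + " bytes back");
        }
    }

The Object overloads take the same route, with byteify()/arrayify() handling the conversion
between the Java array and the contiguous byte buffer.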
diff --git a/java/src/hdf/hdflib/HDFLibraryException.java b/java/src/hdf/hdflib/HDFLibraryException.java
new file mode 100644
index 0000000..6e0a262
--- /dev/null
+++ b/java/src/hdf/hdflib/HDFLibraryException.java
@@ -0,0 +1,108 @@
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+
+package hdf.hdflib;
+
+
+/**
+ * <p>
+ * The class HDFLibraryException reports errors raised by the HDF
+ * library.
+ * <p>
+ * In principle, this includes any and all errors possible
+ * from the HDF library. However, most error conditions
+ * are not yet detected in this version of the Java
+ * interface. This will be added in future releases.
+ *
+ * The only HDF library errors currently raised are errors
+ * in Hopen, such as ``file not found''.
+ */
+
+
+public class HDFLibraryException extends HDFException
+{
+
+ public HDFLibraryException() {
+ super();
+ }
+
+ public HDFLibraryException(String s) {
+ super("HDFLibraryException: "+s);
+ }
+
+ public HDFLibraryException(int err) {
+ super(err);
+ }
+
+ @Override
+ public String getMessage() {
+ if (msg != null) {
+ return msg;
+ }
+
+ String s;
+ try {
+ s = HDFLibrary.HEstring(HDFerror);
+ }
+ catch (HDFException e) {
+ s = "HDF error number: " + HDFerror + ", HEstring failed";
+ }
+ msg = "HDFLibraryException: "+s;
+ return msg;
+ }
+
+ /**
+ * Prints this <code>HDFLibraryException</code>, the HDF Library error
+ * stack, and the Java stack trace to the standard error stream.
+ */
+ @Override
+ public void printStackTrace() {
+ System.err.println(this);
+ printStackTrace0(null); // the HDF Library error stack
+ super.printStackTrace(); // the Java stack trace
+ }
+
+ /**
+ * Prints this <code>HDFLibraryException</code>, the HDF Library error
+ * stack, and the Java stack trace to the specified print stream.
+ *
+ * @param f
+ * the file to which the output is written.
+ */
+ public void printStackTrace(java.io.File f) {
+ if ((f == null) || !f.exists() || f.isDirectory() || !f.canWrite()) {
+ printStackTrace();
+ }
+ else {
+ try {
+ java.io.FileOutputStream o = new java.io.FileOutputStream(f);
+ java.io.PrintWriter p = new java.io.PrintWriter(o);
+ p.println(this);
+ p.close();
+ }
+ catch (Exception ex) {
+ System.err.println(this);
+ }
+ // the HDF Library error stack
+ printStackTrace0(f.getPath());
+ super.printStackTrace(); // the Java stack trace
+ }
+ }
+
+ /*
+ * This private method calls the HDF library to extract the error codes
+ * and error stack.
+ */
+ private native void printStackTrace0(String s);
+}
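
getMessage() above resolves the saved HDF error number through HEstring, and printStackTrace()
appends the native error stack to the Java trace. A small sketch of catching the exception around
one of the DFR8 calls; the file name is a placeholder and the native library is assumed loadable:

    import hdf.hdflib.HDFException;
    import hdf.hdflib.HDFLibrary;
    import hdf.hdflib.HDFLibraryException;

    public class ErrorHandlingSketch {
        public static void main(String[] args) {
            int[] dims = new int[2];
            boolean[] haspal = new boolean[1];
            try {
                // "missing.hdf" is a placeholder; a nonexistent file is
                // expected to make the HDF library call fail.
                HDFLibrary.DFR8getdims("missing.hdf", dims, haspal);
            }
            catch (HDFLibraryException e) {
                // getMessage() maps the stored error code through HEstring().
                System.err.println("HDF library error: " + e.getMessage());
                e.printStackTrace();    // also dumps the HDF error stack
            }
            catch (HDFException e) {
                System.err.println("HDF wrapper error: " + e.getMessage());
            }
        }
    }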
diff --git a/java/src/hdf/hdflib/HDFNBITChunkInfo.java b/java/src/hdf/hdflib/HDFNBITChunkInfo.java
new file mode 100644
index 0000000..9cabaf2
--- /dev/null
+++ b/java/src/hdf/hdflib/HDFNBITChunkInfo.java
@@ -0,0 +1,47 @@
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+
+package hdf.hdflib;
+
+/**
+ * <p>
+ * This class is a container for the parameters to the HDF
+ * NBIT compressed chunked class.
+ * <p>
+ * In this case, the information is the start bit, len, sign extension
+ * and fill.
+ */
+
+
+public class HDFNBITChunkInfo extends HDFChunkInfo {
+
+ public int[] chunk_lengths = new int[HDFConstants.MAX_VAR_DIMS];
+ public int start_bit = 0;
+ public int bit_len = 0;
+ public int sign_ext = 0;
+ public int fill_one = 0;
+
+ public HDFNBITChunkInfo() {
+ ctype = HDFConstants.HDF_NBIT;
+ };
+
+ public HDFNBITChunkInfo( int[] cl, int sb, int bl, int se, int fo) {
+ ctype = HDFConstants.HDF_NBIT;
+ chunk_lengths = cl;
+ start_bit = sb;
+ bit_len = bl;
+ sign_ext = se;
+ fill_one = fo;
+ }
+
+}
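
A brief sketch of filling this container for a hypothetical 2-D dataset chunked into 64x64
blocks, keeping 12 significant bits per value starting at bit 15, with no sign extension and
zero fill; the object would then be passed to whichever chunking call accepts an HDFChunkInfo:

    import hdf.hdflib.HDFConstants;
    import hdf.hdflib.HDFNBITChunkInfo;

    public class NBitChunkSketch {
        public static void main(String[] args) {
            int[] lengths = new int[HDFConstants.MAX_VAR_DIMS];
            lengths[0] = 64;    // chunk extent along dimension 0
            lengths[1] = 64;    // chunk extent along dimension 1

            // start bit 15, 12 bits kept, no sign extension, zero fill
            HDFNBITChunkInfo info = new HDFNBITChunkInfo(lengths, 15, 12, 0, 0);
            System.out.println("start_bit=" + info.start_bit
                    + " bit_len=" + info.bit_len);
        }
    }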
diff --git a/java/src/hdf/hdflib/HDFNBITCompInfo.java b/java/src/hdf/hdflib/HDFNBITCompInfo.java
new file mode 100644
index 0000000..b6796a3
--- /dev/null
+++ b/java/src/hdf/hdflib/HDFNBITCompInfo.java
@@ -0,0 +1,47 @@
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+
+package hdf.hdflib;
+
+/**
+ * <p>
+ * This class is a container for the parameters to the HDF
+ * ``NBIT'' compression class.
+ * <p>
+ * In this case, the information needed is the number type,
+ * the sign extension, the fill bit, the start bit, and the
+ * number of bits to store.
+ */
+
+public class HDFNBITCompInfo extends HDFNewCompInfo {
+
+ public int nt; /* number type of the data to encode */
+ public int sign_ext; /* whether to sign extend or not */
+ public int fill_one; /* whether to fill with 1's or 0's */
+ public int start_bit; /* offset of the start bit in the data */
+ public int bit_len; /* number of bits to store */
+
+ public HDFNBITCompInfo() {
+ ctype = HDFConstants.COMP_CODE_NBIT;
+ }
+
+ public HDFNBITCompInfo(
+ int Nt,
+ int Sign_ext,
+ int Fill_one,
+ int Start_bit,
+ int Bit_len) {
+ ctype = HDFConstants.COMP_CODE_NBIT;
+ nt = Nt;
+ sign_ext = Sign_ext;
+ fill_one = Fill_one;
+ start_bit = Start_bit;
+ bit_len = Bit_len;
+ }
+
+}
diff --git a/java/src/hdf/hdflib/HDFNativeData.java b/java/src/hdf/hdflib/HDFNativeData.java
new file mode 100644
index 0000000..b17d788
--- /dev/null
+++ b/java/src/hdf/hdflib/HDFNativeData.java
@@ -0,0 +1,166 @@
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+
+package hdf.hdflib;
+
+public class HDFNativeData
+{
+ public HDFNativeData() {}
+ public static native int[] byteToInt( byte[] data );
+ public static native float[] byteToFloat( byte[] data );
+ public static native short[] byteToShort( byte[] data );
+ public static native long[] byteToLong( byte[] data );
+ public static native double[] byteToDouble( byte[] data );
+
+ public static native int[] byteToInt( int start, int len, byte[] data );
+ public static int byteToInt( byte[] data, int start)
+ {
+ int[] ival = byteToInt(start, 1, data);
+ return ival[0];
+ }
+
+ public static native short[] byteToShort( int start, int len, byte[] data );
+ public static short byteToShort( byte[] data, int start)
+ {
+ short[] sval = byteToShort(start, 1, data);
+ return sval[0];
+ }
+
+ public static native float[] byteToFloat( int start, int len, byte[] data );
+ public static float byteToFloat( byte[] data, int start)
+ {
+ float[] fval = byteToFloat(start, 1, data);
+ return fval[0];
+ }
+
+ public static native long[] byteToLong( int start, int len, byte[] data );
+ public static long byteToLong( byte[] data, int start)
+ {
+ long[] lval = byteToLong(start, 1, data);
+ return lval[0];
+ }
+
+ public static native double[] byteToDouble( int start, int len, byte[] data );
+ public static double byteToDouble( byte[] data, int start)
+ {
+ double[] dval = byteToDouble(start, 1, data);
+ return dval[0];
+ }
+
+ public static native byte[] intToByte( int start, int len, int[] data);
+ public static native byte[] shortToByte( int start, int len, short[] data);
+ public static native byte[] floatToByte( int start, int len, float[] data);
+ public static native byte[] longToByte( int start, int len, long[] data);
+ public static native byte[] doubleToByte( int start, int len, double[] data);
+
+ public static native byte[] byteToByte( byte data);
+ static byte[] byteToByte( Byte data){return byteToByte(data.byteValue());}
+ public static native byte[] intToByte( int data);
+ static byte[] intToByte( Integer data){return intToByte(data.intValue());}
+ public static native byte[] shortToByte(short data);
+ static byte[] shortToByte( Short data){return shortToByte(data.shortValue());}
+ public static native byte[] floatToByte( float data);
+ static byte[] floatToByte( Float data){return floatToByte(data.floatValue());};
+ public static native byte[] longToByte( long data);
+ static byte[] longToByte(Long data){ return longToByte(data.longValue());}
+ public static native byte[] doubleToByte( double data);
+ static byte[] doubleToByte( Double data){return doubleToByte(data.doubleValue());}
+
+ public Object byteToNumber( byte[] barray, Object obj)
+ throws HDFException
+ {
+ Class theClass = obj.getClass();
+ String type = theClass.getName();
+ Object retobj = null;
+
+ if (type.equals("java.lang.Integer")) {
+ int[] i = hdf.hdflib.HDFNativeData.byteToInt(0,1,barray);
+ retobj = new Integer(i[0]);
+ } else if (type.equals("java.lang.Byte")) {
+ retobj = new Byte(barray[0]);
+ } else if (type.equals("java.lang.Short")) {
+ short[] f = hdf.hdflib.HDFNativeData.byteToShort(0,1,barray);
+ retobj = new Short(f[0]) ;
+ } else if (type.equals("java.lang.Float")) {
+ float[] f = hdf.hdflib.HDFNativeData.byteToFloat(0,1,barray);
+ retobj = new Float(f[0]) ;
+ } else if (type.equals("java.lang.Long")) {
+ long[] f = hdf.hdflib.HDFNativeData.byteToLong(0,1,barray);
+ retobj = new Long(f[0]) ;
+ } else if (type.equals("java.lang.Double")) {
+ double[] f = hdf.hdflib.HDFNativeData.byteToDouble(0,1,barray);
+ retobj = new Double(f[0] );
+ } else {
+ /* exception: unsupported type */
+ HDFException ex =
+ new HDFJavaException("byteToNumber: setfield bad type: "+obj+" "+type);
+ throw(ex);
+ }
+ return(retobj);
+ }
+
+ /**
+ * Allocate a 1D array large enough to hold a multidimensional
+ * array of 'datasize' elements of 'dataType' numbers.
+ * This is called from hdf.hdfobject.HDFGR,
+ * hdf.hdfobject.HDFSDS, and hdf.io.ASCII2HDF.
+ *
+ * @param dataType the type of the image data
+ * @param datasize the size of the image data array
+ * @return an array of 'datasize' numbers of 'dataType'
+ *
+ */
+ public static Object defineDataObject(int dataType, int datasize)
+ {
+ Object data = null;
+
+ if ((dataType & HDFConstants.DFNT_LITEND) != 0) {
+ dataType -= HDFConstants.DFNT_LITEND;
+ }
+
+ switch(dataType)
+ {
+ case HDFConstants.DFNT_INT16:
+ case HDFConstants.DFNT_UINT16:
+ data = new short[datasize];
+ break;
+ case HDFConstants.DFNT_INT32:
+ case HDFConstants.DFNT_UINT32:
+ data = new int[datasize];
+ break;
+ case HDFConstants.DFNT_INT64:
+ case HDFConstants.DFNT_UINT64:
+ data = new long[datasize];
+ break;
+ case HDFConstants.DFNT_FLOAT32:
+ data = new float[datasize];
+ break;
+ case HDFConstants.DFNT_FLOAT64:
+ data = new double[datasize];
+ break;
+ default:
+ case HDFConstants.DFNT_CHAR:
+ case HDFConstants.DFNT_UCHAR8:
+ case HDFConstants.DFNT_UINT8:
+ case HDFConstants.DFNT_INT8:
+ data = new byte[datasize];
+ break;
+ }
+ return data;
+ }
+}
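
The converters above are thin wrappers over native code, so the sketch below assumes the
hdf_java native library has already been loaded (normally as a side effect of using HDFLibrary).
It round-trips a few ints through the native byte layout:

    import hdf.hdflib.HDFNativeData;

    public class NativeDataSketch {
        public static void main(String[] args) {
            int[] values = { 1, 2, 3 };

            // Flatten the ints into the platform's native byte layout...
            byte[] raw = HDFNativeData.intToByte(0, values.length, values);

            // ...and turn the bytes back into Java ints.
            int[] back = HDFNativeData.byteToInt(raw);
            System.out.println(back[0] + " " + back[1] + " " + back[2]);

            // The two-argument helper extracts a single value at an offset.
            int first = HDFNativeData.byteToInt(raw, 0);
            System.out.println("first = " + first);
        }
    }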
diff --git a/java/src/hdf/hdflib/HDFNewCompInfo.java b/java/src/hdf/hdflib/HDFNewCompInfo.java
new file mode 100644
index 0000000..96f5bae
--- /dev/null
+++ b/java/src/hdf/hdflib/HDFNewCompInfo.java
@@ -0,0 +1,33 @@
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+
+package hdf.hdflib;
+
+/**
+ * <p>
+ * This class is a generic container for the parameters to the HDF
+ * compressed classes, with the ``new'' encoding.
+ * <p>
+ * Compression parameters are expressed as instances of sub-classes
+ * of this type.
+ */
+
+
+public class HDFNewCompInfo extends HDFCompInfo {
+ public int ctype; /* from COMP_CODE_ENUM */
+ public HDFNewCompInfo() {
+ ctype = HDFConstants.COMP_CODE_NONE;
+ } ;
+}
+
+
diff --git a/java/src/hdf/hdflib/HDFNotImplementedException.java b/java/src/hdf/hdflib/HDFNotImplementedException.java
new file mode 100644
index 0000000..db5a00c
--- /dev/null
+++ b/java/src/hdf/hdflib/HDFNotImplementedException.java
@@ -0,0 +1,44 @@
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+
+package hdf.hdflib;
+
+/**
+ * <p>
+ * HDFNotImplementedException indicates a function that is part
+ * of the HDF API, but which cannot or will not be implemented
+ * for Java.
+ * <p>
+ * For instance, C routines which take Unix FILE objects
+ * as parameters are not appropriate for the Java interface
+ * and will not be implemented. These routines will raise
+ * an HDFNotImplementedException.
+ */
+
+public class HDFNotImplementedException extends HDFJavaException
+{
+ String msg;
+
+ public HDFNotImplementedException() {
+ HDFerror = 0;
+ }
+
+ public HDFNotImplementedException(String s) {
+ msg = "HDFJavaException: HDF function not implmented (yet): "+s;
+ }
+
+ @Override
+ public String getMessage() {
+ return msg;
+ }
+}
diff --git a/java/src/hdf/hdflib/HDFOldCompInfo.java b/java/src/hdf/hdflib/HDFOldCompInfo.java
new file mode 100644
index 0000000..aeba904
--- /dev/null
+++ b/java/src/hdf/hdflib/HDFOldCompInfo.java
@@ -0,0 +1,33 @@
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+
+package hdf.hdflib;
+
+/**
+ * <p>
+ * This class is a generic container for the parameters to the HDF
+ * compressed classes, with the ``old'' encoding.
+ * <p>
+ * Compression parameters are expressed as instances of sub-classes
+ * of this type.
+ */
+
+
+public class HDFOldCompInfo extends HDFCompInfo {
+ public int ctype; /* from COMP_NONE defines */
+ public HDFOldCompInfo() {
+ ctype = HDFConstants.COMP_NONE;
+ } ;
+}
+
+
diff --git a/java/src/hdf/hdflib/HDFOldRLECompInfo.java b/java/src/hdf/hdflib/HDFOldRLECompInfo.java
new file mode 100644
index 0000000..8c580ad
--- /dev/null
+++ b/java/src/hdf/hdflib/HDFOldRLECompInfo.java
@@ -0,0 +1,30 @@
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+
+package hdf.hdflib;
+
+/**
+ * <p>
+ * This class is a generic container for the parameters to the HDF
+ * RLE compressed classes, with the ``old'' type encoding.
+ * <p>
+ * In this case, there is no auxiliary information.
+ */
+
+public class HDFOldRLECompInfo extends HDFOldCompInfo {
+
+ public HDFOldRLECompInfo () {
+ ctype = HDFConstants.COMP_RLE;
+ }
+
+}
diff --git a/java/src/hdf/hdflib/HDFRLECompInfo.java b/java/src/hdf/hdflib/HDFRLECompInfo.java
new file mode 100644
index 0000000..83d9d05
--- /dev/null
+++ b/java/src/hdf/hdflib/HDFRLECompInfo.java
@@ -0,0 +1,29 @@
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+
+package hdf.hdflib;
+
+/**
+ * <p>
+ * This class is a generic container for the parameters to the HDF
+ * RLE compressed classes, with the ``new'' type encoding.
+ * <p>
+ * In this case, there is no auxiliary information.
+ */
+public class HDFRLECompInfo extends HDFNewCompInfo {
+
+ public HDFRLECompInfo () {
+ ctype = HDFConstants.COMP_CODE_RLE;
+ }
+
+}
diff --git a/java/src/hdf/hdflib/HDFSKPHUFFCompInfo.java b/java/src/hdf/hdflib/HDFSKPHUFFCompInfo.java
new file mode 100644
index 0000000..fcf3f69
--- /dev/null
+++ b/java/src/hdf/hdflib/HDFSKPHUFFCompInfo.java
@@ -0,0 +1,32 @@
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+
+package hdf.hdflib;
+
+/**
+ * <p>
+ * This class is a generic container for the parameters to the HDF
+ * ``Skipping Huffman'' compression classes.
+ * <p>
+ * In this case, the information is the skip size
+ */
+public class HDFSKPHUFFCompInfo extends HDFNewCompInfo {
+
+ public int skp_size;
+
+ public HDFSKPHUFFCompInfo() {
+ ctype = HDFConstants.COMP_CODE_SKPHUFF;
+ } ;
+}
+
+
diff --git a/java/src/hdf/hdflib/HDFSZIPCompInfo.java b/java/src/hdf/hdflib/HDFSZIPCompInfo.java
new file mode 100644
index 0000000..70ceb83
--- /dev/null
+++ b/java/src/hdf/hdflib/HDFSZIPCompInfo.java
@@ -0,0 +1,53 @@
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+
+package hdf.hdflib;
+
+/**
+ * <p>
+ * This class is a container for the parameters to the HDF
+ * SZIP compression algorithm.
+ * <p>
+ * In this case, the parameters are the bits per pixel, the options mask,
+ * the number of pixels, the pixels per block, and the pixels per scanline.
+ */
+
+
+public class HDFSZIPCompInfo extends HDFNewCompInfo {
+
+ public int bits_per_pixel;
+ public int options_mask;
+ public int pixels;
+ public int pixels_per_block;
+ public int pixels_per_scanline;
+
+ public HDFSZIPCompInfo() {
+ ctype = HDFConstants.COMP_CODE_SZIP;
+ }
+
+ public HDFSZIPCompInfo(
+ int bits_per_pixel_in,
+ int options_mask_in,
+ int pixels_in,
+ int pixels_per_block_in,
+ int pixels_per_scanline_in) {
+ ctype = HDFConstants.COMP_CODE_SZIP;
+ bits_per_pixel = bits_per_pixel_in;
+ options_mask = options_mask_in;
+ pixels = pixels_in;
+ pixels_per_block = pixels_per_block_in;
+ pixels_per_scanline = pixels_per_scanline_in;
+
+ }
+}
+
+
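These CompInfo subclasses are plain parameter holders; the matching COMP_CODE_* constant is
passed alongside them. A short sketch, assuming the native library is available, of selecting
RLE and then skipping-Huffman compression for subsequent 24-bit raster writes:

    import hdf.hdflib.HDFConstants;
    import hdf.hdflib.HDFException;
    import hdf.hdflib.HDFLibrary;
    import hdf.hdflib.HDFRLECompInfo;
    import hdf.hdflib.HDFSKPHUFFCompInfo;

    public class CompressionSketch {
        public static void main(String[] args) throws HDFException {
            // RLE carries no parameters; the subclass only sets the coder type.
            HDFRLECompInfo rle = new HDFRLECompInfo();
            HDFLibrary.DF24setcompress(HDFConstants.COMP_CODE_RLE, rle);

            // Skipping Huffman carries one parameter, the skip size in bytes.
            HDFSKPHUFFCompInfo skphuff = new HDFSKPHUFFCompInfo();
            skphuff.skp_size = 4;
            HDFLibrary.DF24setcompress(HDFConstants.COMP_CODE_SKPHUFF, skphuff);
        }
    }
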
diff --git a/java/src/hdf/overview.html b/java/src/hdf/overview.html
new file mode 100644
index 0000000..641227a
--- /dev/null
+++ b/java/src/hdf/overview.html
@@ -0,0 +1,94 @@
+<body>
+
+<h1>Java HDF Interface (JHI4)</h1>
+
+<h2><u>What it is</u></h2>
+The <b>Java HDF Interface (JHI4)</b> is a Java package
+(<a href="../../hdf-java-html/javadocs/hdf/hdflib/package-summary.html">hdf.hdflib</a>)
+that ``wraps around'' the HDF library.
+<p />There are a large number of functions in the HDF
+library (version 4.2). Some of the functions are not supported in JHI4.
+
+<center><table BORDER=1 COLS=1 WIDTH="71%" BGCOLOR="#dbeaf5" >
+<tr>
+<td>
+<center>Note: The JHI4 only supports HDF4.</center>
+</td>
+</tr>
+</table></center>
+
+<p>The JHI4 may be used by any Java application that needs to access HDF
+files. It is extremely important to emphasize that <i>this package is not
+a pure Java implementation of the HDF library.</i> The JHI4 calls the
+same HDF library that is used by C or FORTRAN programs. (Note that this
+product cannot be used in most network browsers because it accesses the
+local disk using native code.)
+<p>The Java HDF Interface consists of Java classes and a dynamically
+linked native library. The Java classes declare native methods, and the
+library contains C functions which implement the native methods. The C
+functions call the standard HDF library, which is linked as part of the
+same library on most platforms.
+<p>The central part of the JHI4 is the Java class <i>
+<a href="../../hdf-java-html/javadocs/hdf/hdflib/HDFLibrary.html">hdf.hdflib.HDFLibrary</a></i>.
+The <i>HDFLibrary</i> class calls the standard (<i>i.e.</i>, `native' code) HDF
+library, with native methods for most of the HDF functions.
+
+<h3>
+<u>How to use it</u></h3>
+The JHI4 is used by Java classes to call the HDF library, in order to
+create HDF files, and read and write data in existing HDF files.
+<p>For example, the HDF library has the function <b>Hopen</b> to open
+an HDF file. The Java interface is the class <i>
+<a href="../../hdf-java-html/javadocs/hdf/hdflib/HDFLibrary.html">hdf.hdflib.HDFLibrary</a></i>,
+which has a method:
+<pre><b>static native int Hopen(String filename, int flags, int access );</b></pre>
+The native method is implemented in C using the
+<a href="http://java.sun.com/javase/6/docs/technotes/guides/jni/index.html">Java
+Native Interface</a> (JNI). This is written something like the following:
+<pre><b>JNIEXPORT jint
+JNICALL Java_hdf_hdflib_HDFLibrary_Hopen
+(
+ JNIEnv *env,
+ jclass class,
+ jstring hdfFile,
+ jint flags,
+ jint access)
+ {
+
+ /* ...convert Java String to (char *) */
+
+ /* call the HDF library */
+ retVal = Hopen((char *)file, (unsigned)flags, (hid_t)access );
+
+ /* ... */
+}</b></pre>
+This C function calls the HDF library and returns the result appropriately.
+<p>There is one native method for each HDF entry point (several hundred
+in all), which are compiled with the HDF library into a dynamically loaded
+library (<i>libhdf_java</i>). Note that this library must be built for each
+platform.
+<p>To call the HDF `<b><i>Hopen</i></b>' function, a Java program would
+import the package '<i><b>hdf.hdflib.*</b>'</i>, and invoke the method
+on the class '<b><i>HDFLibrary</i></b>'. The Java program would look something
+like this:
+<pre><b>import hdf.hdflib.*;
+
+{
+ /* ... */
+
+ try {
+ file = HDFLibrary.Hopen("myFile.hdf", flags, access );
+ } catch (HDFException ex) {
+ //...
+ }
+
+ /* ... */
+}</b></pre>
+The <i><b>HDFLibrary</b> </i>class automatically loads the native method implementations
+and the HDF library.
+
+<h3>
+<a NAME="DOWNLOAD"></a>To Obtain</h3>
+The JHI4 is included with the <a href="http://www.hdfgroup.org/HDF/index.html">HDF</a> library.
+
+</body>
diff --git a/java/src/jni/CMakeLists.txt b/java/src/jni/CMakeLists.txt
new file mode 100644
index 0000000..aa6dc60
--- /dev/null
+++ b/java/src/jni/CMakeLists.txt
@@ -0,0 +1,68 @@
+cmake_minimum_required (VERSION 3.1.0)
+PROJECT (HDF4_JAVA_JNI C)
+
+set (HDF4_JAVA_JNI_CSRCS
+ ${HDF4_JAVA_JNI_SOURCE_DIR}/hdfanImp.c
+ ${HDF4_JAVA_JNI_SOURCE_DIR}/hdfdfpalImp.c
+ ${HDF4_JAVA_JNI_SOURCE_DIR}/hdfdfuImp.c
+ ${HDF4_JAVA_JNI_SOURCE_DIR}/hdfexceptionImp.c
+ ${HDF4_JAVA_JNI_SOURCE_DIR}/hdfgrImp.c
+ ${HDF4_JAVA_JNI_SOURCE_DIR}/hdfheImp.c
+ ${HDF4_JAVA_JNI_SOURCE_DIR}/hdfhxImp.c
+ ${HDF4_JAVA_JNI_SOURCE_DIR}/hdfImp.c
+ ${HDF4_JAVA_JNI_SOURCE_DIR}/hdfnativeImp.c
+ ${HDF4_JAVA_JNI_SOURCE_DIR}/hdfr24Imp.c
+ ${HDF4_JAVA_JNI_SOURCE_DIR}/hdfr8Imp.c
+ ${HDF4_JAVA_JNI_SOURCE_DIR}/hdfsdsImp.c
+ ${HDF4_JAVA_JNI_SOURCE_DIR}/hdfstructsutil.c
+ ${HDF4_JAVA_JNI_SOURCE_DIR}/hdfvdataImp.c
+ ${HDF4_JAVA_JNI_SOURCE_DIR}/hdfvfImp.c
+ ${HDF4_JAVA_JNI_SOURCE_DIR}/hdfvgroupImp.c
+ ${HDF4_JAVA_JNI_SOURCE_DIR}/hdfvhImp.c
+ ${HDF4_JAVA_JNI_SOURCE_DIR}/hdfvqImp.c
+ ${HDF4_JAVA_JNI_SOURCE_DIR}/hdfvsqImp.c
+)
+
+INCLUDE_DIRECTORIES ( ${HDF4_JAVA_JNI_SOURCE_DIR} )
+
+set (CMAKE_INCLUDE_CURRENT_DIR_IN_INTERFACE ON)
+
+########### JNI libraries always must be built shared ###############
+add_library (${HDF4_JAVA_JNI_LIB_TARGET} SHARED ${HDF4_JAVA_JNI_CSRCS})
+TARGET_C_PROPERTIES (${HDF4_JAVA_JNI_LIB_TARGET} SHARED " " " ")
+if (WIN32)
+ TARGET_LINK_LIBRARIES (${HDF4_JAVA_JNI_LIB_TARGET} ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} "ws2_32.lib")
+else (WIN32)
+ TARGET_LINK_LIBRARIES (${HDF4_JAVA_JNI_LIB_TARGET} ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET})
+endif (WIN32)
+set_target_properties (${HDF4_JAVA_JNI_LIB_TARGET} PROPERTIES FOLDER libraries/jni)
+SET_GLOBAL_VARIABLE (HDF4_JAVA_LIBRARIES_TO_EXPORT "${HDF4_JAVA_LIBRARIES_TO_EXPORT};${HDF4_JAVA_JNI_LIB_TARGET}")
+H4_SET_LIB_OPTIONS (${HDF4_JAVA_JNI_LIB_TARGET} ${HDF4_JAVA_JNI_LIB_NAME} SHARED)
+if (WIN32)
+ get_filename_component (HDF4_JAVA_JNI_DLL_NAME ${HDF4_JAVA_JNI_LIB_TARGET} NAME_WE)
+ # message (STATUS "HDF4_JAVA_JNI_DLL_NAME: ${HDF4_JAVA_JNI_DLL_NAME}")
+ if (BUILD_TESTING)
+ add_custom_target (HDF4_JAVA_JNI-Test-Copy ALL
+ COMMAND ${CMAKE_COMMAND} -E copy_if_different ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${CMAKE_CFG_INTDIR}/${HDF4_JAVA_JNI_DLL_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX} ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${HDF4_JAVA_JNI_DLL_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX}
+ COMMENT "Copying ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${CMAKE_CFG_INTDIR}/${HDF4_JAVA_JNI_DLL_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX} to ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/"
+ )
+ add_dependencies (HDF4_JAVA_JNI-Test-Copy ${HDF4_JAVA_JNI_LIB_TARGET})
+ endif (BUILD_TESTING)
+endif (WIN32)
+
+#-----------------------------------------------------------------------------
+# Add Target(s) to CMake Install for import into other projects
+#-----------------------------------------------------------------------------
+if (HDF4_EXPORTED_TARGETS)
+ INSTALL_TARGET_PDB (${HDF4_JAVA_JNI_LIB_TARGET} ${HDF4_INSTALL_BIN_DIR} libraries)
+ install (
+ TARGETS
+ ${HDF4_JAVA_JNI_LIB_TARGET}
+ EXPORT
+ ${HDF4_JAVA_EXPORTED_TARGETS}
+ LIBRARY DESTINATION ${HDF4_INSTALL_LIB_DIR} COMPONENT libraries
+ ARCHIVE DESTINATION ${HDF4_INSTALL_LIB_DIR} COMPONENT libraries
+ RUNTIME DESTINATION ${HDF4_INSTALL_LIB_DIR} COMPONENT libraries
+ FRAMEWORK DESTINATION ${HDF4_INSTALL_FWRK_DIR} COMPONENT libraries
+ )
+endif (HDF4_EXPORTED_TARGETS)
diff --git a/java/src/jni/Makefile.am b/java/src/jni/Makefile.am
new file mode 100644
index 0000000..196612d
--- /dev/null
+++ b/java/src/jni/Makefile.am
@@ -0,0 +1,32 @@
+#
+# HDF Java native interface (JNI) Library Makefile(.in)
+#
+
+include $(top_srcdir)/config/commence.am
+
+# Mark this directory as part of the JNI API
+JAVA_API=yes
+AM_CFLAGS=-DNDEBUG -fPIC
+
+# Include src directory and JNI flags
+AM_CPPFLAGS=-I$(top_srcdir)/hdf/src \
+ -I$(top_srcdir)/mfhdf/libsrc \
+ -I$(top_builddir)/mfhdf/libsrc -I$(top_srcdir)/java/src/jni $(JNIFLAGS)
+
+# This is our main target
+lib_LTLIBRARIES=libhdf_java.la
+
+# Source files for the library
+libhdf_java_la_SOURCES=hdfanImp.c hdfdfpalImp.c \
+ hdfdfuImp.c hdfexceptionImp.c hdfgrImp.c hdfheImp.c hdfhxImp.c hdfImp.c hdfnativeImp.c \
+ hdfr24Imp.c hdfr8Imp.c hdfsdsImp.c hdfstructsutil.c hdfvdataImp.c hdfvfImp.c hdfvgroupImp.c \
+ hdfvhImp.c hdfvqImp.c hdfvsqImp.c
+
+# HDF Java (JNI) library depends on HDF Library.
+libhdf_java_la_LIBADD=$(LIBMFHDF) $(LIBHDF) @LIBS@
+libhdf_java_la_DEPENDENCIES = $(LIBMFHDF) $(LIBHDF)
+libhdf_java_la_INCLUDES=-I$(top_srcdir)/hdf/src \
+ -I$(top_srcdir)/mfhdf/libsrc \
+ -I$(top_builddir)/mfhdf/libsrc
+
+include $(top_srcdir)/config/conclude.am
diff --git a/mfhdf/xdr/Makefile.in b/java/src/jni/Makefile.in
similarity index 88%
copy from mfhdf/xdr/Makefile.in
copy to java/src/jni/Makefile.in
index 2385193..a406428 100644
--- a/mfhdf/xdr/Makefile.in
+++ b/java/src/jni/Makefile.in
@@ -15,10 +15,9 @@
@SET_MAKE@
#
-# XDR Library Makefile(.in)
+# HDF Java native interface (JNI) Library Makefile(.in)
#
-
VPATH = @srcdir@
am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)'
am__make_running_with_option = \
@@ -86,13 +85,24 @@ host_triplet = @host@
DIST_COMMON = $(top_srcdir)/config/commence.am \
$(top_srcdir)/config/conclude.am $(srcdir)/Makefile.in \
$(srcdir)/Makefile.am $(top_srcdir)/bin/mkinstalldirs \
- $(top_srcdir)/bin/depcomp $(include_HEADERS) \
- $(top_srcdir)/bin/test-driver README
-check_PROGRAMS = xdrtest$(EXEEXT)
-TESTS = $(am__EXEEXT_1)
-subdir = mfhdf/xdr
+ $(top_srcdir)/bin/depcomp $(top_srcdir)/bin/test-driver
+TESTS =
+subdir = java/src/jni
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
-am__aclocal_m4_deps = $(top_srcdir)/configure.ac
+am__aclocal_m4_deps = $(top_srcdir)/m4/ax_check_class.m4 \
+ $(top_srcdir)/m4/ax_check_classpath.m4 \
+ $(top_srcdir)/m4/ax_check_java_home.m4 \
+ $(top_srcdir)/m4/ax_check_junit.m4 \
+ $(top_srcdir)/m4/ax_java_options.m4 \
+ $(top_srcdir)/m4/ax_jni_include_dir.m4 \
+ $(top_srcdir)/m4/ax_prog_jar.m4 \
+ $(top_srcdir)/m4/ax_prog_java.m4 \
+ $(top_srcdir)/m4/ax_prog_java_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javac.m4 \
+ $(top_srcdir)/m4/ax_prog_javac_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javadoc.m4 \
+ $(top_srcdir)/m4/ax_try_compile_java.m4 \
+ $(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
mkinstalldirs = $(SHELL) $(top_srcdir)/bin/mkinstalldirs
@@ -126,18 +136,18 @@ am__uninstall_files_from_dir = { \
|| { echo " ( cd '$$dir' && rm -f" $$files ")"; \
$(am__cd) "$$dir" && rm -f $$files; }; \
}
-am__installdirs = "$(DESTDIR)$(libdir)" "$(DESTDIR)$(includedir)"
+am__installdirs = "$(DESTDIR)$(libdir)"
LTLIBRARIES = $(lib_LTLIBRARIES)
-libxdr_la_LIBADD =
-am_libxdr_la_OBJECTS = xdr.lo xdrfloat.lo xdrstdio.lo xdrarray.lo
-libxdr_la_OBJECTS = $(am_libxdr_la_OBJECTS)
+am_libhdf_java_la_OBJECTS = hdfanImp.lo hdfdfpalImp.lo hdfdfuImp.lo \
+ hdfexceptionImp.lo hdfgrImp.lo hdfheImp.lo hdfhxImp.lo \
+ hdfImp.lo hdfnativeImp.lo hdfr24Imp.lo hdfr8Imp.lo \
+ hdfsdsImp.lo hdfstructsutil.lo hdfvdataImp.lo hdfvfImp.lo \
+ hdfvgroupImp.lo hdfvhImp.lo hdfvqImp.lo hdfvsqImp.lo
+libhdf_java_la_OBJECTS = $(am_libhdf_java_la_OBJECTS)
AM_V_lt = $(am__v_lt_ at AM_V@)
am__v_lt_ = $(am__v_lt_ at AM_DEFAULT_V@)
am__v_lt_0 = --silent
am__v_lt_1 =
-am_xdrtest_OBJECTS = xdrtest.$(OBJEXT)
-xdrtest_OBJECTS = $(am_xdrtest_OBJECTS)
-xdrtest_LDADD = $(LDADD)
AM_V_P = $(am__v_P_ at AM_V@)
am__v_P_ = $(am__v_P_ at AM_DEFAULT_V@)
am__v_P_0 = false
@@ -172,14 +182,13 @@ AM_V_CCLD = $(am__v_CCLD_ at AM_V@)
am__v_CCLD_ = $(am__v_CCLD_ at AM_DEFAULT_V@)
am__v_CCLD_0 = @echo " CCLD " $@;
am__v_CCLD_1 =
-SOURCES = $(libxdr_la_SOURCES) $(xdrtest_SOURCES)
-DIST_SOURCES = $(libxdr_la_SOURCES) $(xdrtest_SOURCES)
+SOURCES = $(libhdf_java_la_SOURCES)
+DIST_SOURCES = $(libhdf_java_la_SOURCES)
am__can_run_installinfo = \
case $$AM_UPDATE_INFO_DIR in \
n|no|NO) false;; \
*) (install-info --version) >/dev/null 2>&1;; \
esac
-HEADERS = $(include_HEADERS)
am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP)
# Read a list of newline-separated strings from the standard input,
# and print each of them once, without duplicates. Input order is
@@ -356,10 +365,12 @@ am__set_TESTS_bases = \
bases=`echo $$bases`
RECHECK_LOGS = $(TEST_LOGS)
AM_RECURSIVE_TARGETS = check recheck
-am__EXEEXT_1 = xdrtest$(EXEEXT)
TEST_SUITE_LOG = test-suite.log
-LOG_DRIVER = $(SHELL) $(top_srcdir)/bin/test-driver
-LOG_COMPILE = $(LOG_COMPILER) $(AM_LOG_FLAGS) $(LOG_FLAGS)
+am__test_logs1 = $(TESTS:=.log)
+am__test_logs2 = $(am__test_logs1:@EXEEXT at .log=.log)
+TEST_LOGS = $(am__test_logs2:.sh.log=.log)
+SH_LOG_DRIVER = $(SHELL) $(top_srcdir)/bin/test-driver
+SH_LOG_COMPILE = $(SH_LOG_COMPILER) $(AM_SH_LOG_FLAGS) $(SH_LOG_FLAGS)
am__set_b = \
case '$@' in \
*/*) \
@@ -370,11 +381,6 @@ am__set_b = \
*) \
b='$*';; \
esac
-am__test_logs1 = $(TESTS:=.log)
-am__test_logs2 = $(am__test_logs1:@EXEEXT at .log=.log)
-TEST_LOGS = $(am__test_logs2:.sh.log=.log)
-SH_LOG_DRIVER = $(SHELL) $(top_srcdir)/bin/test-driver
-SH_LOG_COMPILE = $(SH_LOG_COMPILER) $(AM_SH_LOG_FLAGS) $(SH_LOG_FLAGS)
DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
ACLOCAL = @ACLOCAL@
AMTAR = @AMTAR@
@@ -418,12 +424,27 @@ FFLAGS = @FFLAGS@
FGREP = @FGREP@
FLIBS = @FLIBS@
GREP = @GREP@
+H4_CLASSPATH = @H4_CLASSPATH@
+H4_JAVACFLAGS = @H4_JAVACFLAGS@
+H4_JAVAFLAGS = @H4_JAVAFLAGS@
H4_VERSION = @H4_VERSION@
+HDF_JAVA = @HDF_JAVA@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
INSTALL_PROGRAM = @INSTALL_PROGRAM@
INSTALL_SCRIPT = @INSTALL_SCRIPT@
INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+JAR = @JAR@
+JAVA = @JAVA@
+JAVAC = @JAVAC@
+JAVACFLAGS = @JAVACFLAGS@
+JAVADOC = @JAVADOC@
+JAVAFLAGS = @JAVAFLAGS@
+JAVAPREFIX = @JAVAPREFIX@
+JAVA_JUNIT = @JAVA_JUNIT@
+JAVA_PATH_NAME = @JAVA_PATH_NAME@
+JNIFLAGS = @JNIFLAGS@
+JUNIT = @JUNIT@
LD = @LD@
LDFLAGS = @LDFLAGS@
LEX = @LEX@
@@ -466,11 +487,14 @@ STRIP = @STRIP@
SZIP_HAS_ENCODER = @SZIP_HAS_ENCODER@
SZIP_INFO = @SZIP_INFO@
TBL = @TBL@
+TESTS_JUNIT = @TESTS_JUNIT@
TEST_FORTRAN_NETCDF = @TEST_FORTRAN_NETCDF@
UNAME_INFO = @UNAME_INFO@
USE_COMP_SZIP = @USE_COMP_SZIP@
+UUDECODE = @UUDECODE@
VERSION = @VERSION@
YACC = @YACC@
+_ACJNI_JAVAC = @_ACJNI_JAVAC@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
@@ -544,14 +568,34 @@ ACLOCAL_AMFLAGS = "-I m4"
# .chkexe files are used to mark tests that have run successfully.
# .chklog files are output from those tests.
CHECK_CLEANFILES = *.chkexe *.chklog
-AM_CPPFLAGS = -I$(top_srcdir)/hdf/src
-# XDR Library
-lib_LTLIBRARIES = libxdr.la
-libxdr_la_SOURCES = xdr.c xdrfloat.c xdrstdio.c xdrarray.c
-include_HEADERS = xdr.h types.h
-TEST_PROG = xdrtest
-xdrtest_SOURCES = xdrtest.c
+# Mark this directory as part of the JNI API
+JAVA_API = yes
+AM_CFLAGS = -DNDEBUG -fPIC
+
+# Include src directory and JNI flags
+AM_CPPFLAGS = -I$(top_srcdir)/hdf/src \
+ -I$(top_srcdir)/mfhdf/libsrc \
+ -I$(top_builddir)/mfhdf/libsrc -I$(top_srcdir)/java/src/jni $(JNIFLAGS)
+
+
+# This is our main target
+lib_LTLIBRARIES = libhdf_java.la
+
+# Source files for the library
+libhdf_java_la_SOURCES = hdfanImp.c hdfdfpalImp.c \
+ hdfdfuImp.c hdfexceptionImp.c hdfgrImp.c hdfheImp.c hdfhxImp.c hdfImp.c hdfnativeImp.c \
+ hdfr24Imp.c hdfr8Imp.c hdfsdsImp.c hdfstructsutil.c hdfvdataImp.c hdfvfImp.c hdfvgroupImp.c \
+ hdfvhImp.c hdfvqImp.c hdfvsqImp.c
+
+
+# HDF Java (JNI) library depends on HDF Library.
+libhdf_java_la_LIBADD = $(LIBMFHDF) $(LIBHDF) @LIBS@
+libhdf_java_la_DEPENDENCIES = $(LIBMFHDF) $(LIBHDF)
+libhdf_java_la_INCLUDES = -I$(top_srcdir)/hdf/src \
+ -I$(top_srcdir)/mfhdf/libsrc \
+ -I$(top_builddir)/mfhdf/libsrc
+
# Automake needs to be taught how to build lib, progs, and tests targets.
# These will be filled in automatically for the most part (e.g.,
@@ -583,9 +627,9 @@ $(srcdir)/Makefile.in: @MAINTAINER_MODE_TRUE@ $(srcdir)/Makefile.am $(top_srcdir
exit 1;; \
esac; \
done; \
- echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign mfhdf/xdr/Makefile'; \
+ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign java/src/jni/Makefile'; \
$(am__cd) $(top_srcdir) && \
- $(AUTOMAKE) --foreign mfhdf/xdr/Makefile
+ $(AUTOMAKE) --foreign java/src/jni/Makefile
.PRECIOUS: Makefile
Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
@case '$?' in \
@@ -641,21 +685,8 @@ clean-libLTLIBRARIES:
rm -f $${locs}; \
}
-libxdr.la: $(libxdr_la_OBJECTS) $(libxdr_la_DEPENDENCIES) $(EXTRA_libxdr_la_DEPENDENCIES)
- $(AM_V_CCLD)$(LINK) -rpath $(libdir) $(libxdr_la_OBJECTS) $(libxdr_la_LIBADD) $(LIBS)
-
-clean-checkPROGRAMS:
- @list='$(check_PROGRAMS)'; test -n "$$list" || exit 0; \
- echo " rm -f" $$list; \
- rm -f $$list || exit $$?; \
- test -n "$(EXEEXT)" || exit 0; \
- list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \
- echo " rm -f" $$list; \
- rm -f $$list
-
-xdrtest$(EXEEXT): $(xdrtest_OBJECTS) $(xdrtest_DEPENDENCIES) $(EXTRA_xdrtest_DEPENDENCIES)
- @rm -f xdrtest$(EXEEXT)
- $(AM_V_CCLD)$(LINK) $(xdrtest_OBJECTS) $(xdrtest_LDADD) $(LIBS)
+libhdf_java.la: $(libhdf_java_la_OBJECTS) $(libhdf_java_la_DEPENDENCIES) $(EXTRA_libhdf_java_la_DEPENDENCIES)
+ $(AM_V_CCLD)$(LINK) -rpath $(libdir) $(libhdf_java_la_OBJECTS) $(libhdf_java_la_LIBADD) $(LIBS)
mostlyclean-compile:
-rm -f *.$(OBJEXT)
@@ -663,11 +694,25 @@ mostlyclean-compile:
distclean-compile:
-rm -f *.tab.c
- at AMDEP_TRUE@@am__include@ @am__quote at ./$(DEPDIR)/xdr.Plo at am__quote@
- at AMDEP_TRUE@@am__include@ @am__quote at ./$(DEPDIR)/xdrarray.Plo at am__quote@
- at AMDEP_TRUE@@am__include@ @am__quote at ./$(DEPDIR)/xdrfloat.Plo at am__quote@
- at AMDEP_TRUE@@am__include@ @am__quote at ./$(DEPDIR)/xdrstdio.Plo at am__quote@
- at AMDEP_TRUE@@am__include@ @am__quote at ./$(DEPDIR)/xdrtest.Po at am__quote@
+ at AMDEP_TRUE@@am__include@ @am__quote at ./$(DEPDIR)/hdfImp.Plo at am__quote@
+ at AMDEP_TRUE@@am__include@ @am__quote at ./$(DEPDIR)/hdfanImp.Plo at am__quote@
+ at AMDEP_TRUE@@am__include@ @am__quote at ./$(DEPDIR)/hdfdfpalImp.Plo at am__quote@
+ at AMDEP_TRUE@@am__include@ @am__quote at ./$(DEPDIR)/hdfdfuImp.Plo at am__quote@
+ at AMDEP_TRUE@@am__include@ @am__quote at ./$(DEPDIR)/hdfexceptionImp.Plo at am__quote@
+ at AMDEP_TRUE@@am__include@ @am__quote at ./$(DEPDIR)/hdfgrImp.Plo at am__quote@
+ at AMDEP_TRUE@@am__include@ @am__quote at ./$(DEPDIR)/hdfheImp.Plo at am__quote@
+ at AMDEP_TRUE@@am__include@ @am__quote at ./$(DEPDIR)/hdfhxImp.Plo at am__quote@
+ at AMDEP_TRUE@@am__include@ @am__quote at ./$(DEPDIR)/hdfnativeImp.Plo at am__quote@
+ at AMDEP_TRUE@@am__include@ @am__quote at ./$(DEPDIR)/hdfr24Imp.Plo at am__quote@
+ at AMDEP_TRUE@@am__include@ @am__quote at ./$(DEPDIR)/hdfr8Imp.Plo at am__quote@
+ at AMDEP_TRUE@@am__include@ @am__quote at ./$(DEPDIR)/hdfsdsImp.Plo at am__quote@
+ at AMDEP_TRUE@@am__include@ @am__quote at ./$(DEPDIR)/hdfstructsutil.Plo at am__quote@
+ at AMDEP_TRUE@@am__include@ @am__quote at ./$(DEPDIR)/hdfvdataImp.Plo at am__quote@
+ at AMDEP_TRUE@@am__include@ @am__quote at ./$(DEPDIR)/hdfvfImp.Plo at am__quote@
+ at AMDEP_TRUE@@am__include@ @am__quote at ./$(DEPDIR)/hdfvgroupImp.Plo at am__quote@
+ at AMDEP_TRUE@@am__include@ @am__quote at ./$(DEPDIR)/hdfvhImp.Plo at am__quote@
+ at AMDEP_TRUE@@am__include@ @am__quote at ./$(DEPDIR)/hdfvqImp.Plo at am__quote@
+ at AMDEP_TRUE@@am__include@ @am__quote at ./$(DEPDIR)/hdfvsqImp.Plo at am__quote@
.c.o:
@am__fastdepCC_TRUE@ $(AM_V_CC)$(COMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $<
@@ -695,27 +740,6 @@ mostlyclean-libtool:
clean-libtool:
-rm -rf .libs _libs
-install-includeHEADERS: $(include_HEADERS)
- @$(NORMAL_INSTALL)
- @list='$(include_HEADERS)'; test -n "$(includedir)" || list=; \
- if test -n "$$list"; then \
- echo " $(MKDIR_P) '$(DESTDIR)$(includedir)'"; \
- $(MKDIR_P) "$(DESTDIR)$(includedir)" || exit 1; \
- fi; \
- for p in $$list; do \
- if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
- echo "$$d$$p"; \
- done | $(am__base_list) | \
- while read files; do \
- echo " $(INSTALL_HEADER) $$files '$(DESTDIR)$(includedir)'"; \
- $(INSTALL_HEADER) $$files "$(DESTDIR)$(includedir)" || exit $$?; \
- done
-
-uninstall-includeHEADERS:
- @$(NORMAL_UNINSTALL)
- @list='$(include_HEADERS)'; test -n "$(includedir)" || list=; \
- files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
- dir='$(DESTDIR)$(includedir)'; $(am__uninstall_files_from_dir)
ID: $(am__tagged_files)
$(am__define_uniq_tagged_files); mkid -fID $$unique
@@ -888,7 +912,7 @@ $(TEST_SUITE_LOG): $(TEST_LOGS)
echo "$$col$$br$$std"; \
fi; \
$$success || exit 1
-recheck: all $(check_PROGRAMS)
+recheck: all
@test -z "$(TEST_SUITE_LOG)" || rm -f $(TEST_SUITE_LOG)
@set +e; $(am__set_TESTS_bases); \
bases=`for i in $$bases; do echo $$i; done \
@@ -899,13 +923,6 @@ recheck: all $(check_PROGRAMS)
am__force_recheck=am--force-recheck \
TEST_LOGS="$$log_list"; \
exit $$?
-xdrtest.log: xdrtest$(EXEEXT)
- @p='xdrtest$(EXEEXT)'; \
- b='xdrtest'; \
- $(am__check_pre) $(LOG_DRIVER) --test-name "$$f" \
- --log-file $$b.log --trs-file $$b.trs \
- $(am__common_driver_flags) $(AM_LOG_DRIVER_FLAGS) $(LOG_DRIVER_FLAGS) -- $(LOG_COMPILE) \
- "$$tst" $(AM_TESTS_FD_REDIRECT)
.sh.log:
@p='$<'; \
$(am__set_b); \
@@ -952,12 +969,11 @@ distdir: $(DISTFILES)
fi; \
done
check-am: all-am
- $(MAKE) $(AM_MAKEFLAGS) $(check_PROGRAMS)
$(MAKE) $(AM_MAKEFLAGS) check-TESTS
check: check-am
-all-am: Makefile $(LTLIBRARIES) $(HEADERS) all-local
+all-am: Makefile $(LTLIBRARIES) all-local
installdirs:
- for dir in "$(DESTDIR)$(libdir)" "$(DESTDIR)$(includedir)"; do \
+ for dir in "$(DESTDIR)$(libdir)"; do \
test -z "$$dir" || $(MKDIR_P) "$$dir"; \
done
install: install-am
@@ -995,8 +1011,8 @@ maintainer-clean-generic:
@echo "it deletes files that may require special tools to rebuild."
clean: clean-am
-clean-am: clean-checkPROGRAMS clean-generic clean-libLTLIBRARIES \
- clean-libtool mostlyclean-am
+clean-am: clean-generic clean-libLTLIBRARIES clean-libtool \
+ mostlyclean-am
distclean: distclean-am
-rm -rf ./$(DEPDIR)
@@ -1016,7 +1032,7 @@ info: info-am
info-am:
-install-data-am: install-includeHEADERS
+install-data-am:
install-dvi: install-dvi-am
@@ -1062,26 +1078,25 @@ ps: ps-am
ps-am:
-uninstall-am: uninstall-includeHEADERS uninstall-libLTLIBRARIES
+uninstall-am: uninstall-libLTLIBRARIES
.MAKE: check-am install-am install-strip
.PHONY: CTAGS GTAGS TAGS all all-am all-local check check-TESTS \
- check-am clean clean-checkPROGRAMS clean-generic \
- clean-libLTLIBRARIES clean-libtool cscopelist-am ctags \
- ctags-am distclean distclean-compile distclean-generic \
- distclean-libtool distclean-tags distdir dvi dvi-am html \
- html-am info info-am install install-am install-data \
- install-data-am install-dvi install-dvi-am install-exec \
- install-exec-am install-html install-html-am \
- install-includeHEADERS install-info install-info-am \
+ check-am clean clean-generic clean-libLTLIBRARIES \
+ clean-libtool cscopelist-am ctags ctags-am distclean \
+ distclean-compile distclean-generic distclean-libtool \
+ distclean-tags distdir dvi dvi-am html html-am info info-am \
+ install install-am install-data install-data-am install-dvi \
+ install-dvi-am install-exec install-exec-am install-html \
+ install-html-am install-info install-info-am \
install-libLTLIBRARIES install-man install-pdf install-pdf-am \
install-ps install-ps-am install-strip installcheck \
installcheck-am installdirs maintainer-clean \
maintainer-clean-generic mostlyclean mostlyclean-compile \
mostlyclean-generic mostlyclean-libtool mostlyclean-local pdf \
pdf-am ps ps-am recheck tags tags-am uninstall uninstall-am \
- uninstall-includeHEADERS uninstall-libLTLIBRARIES
+ uninstall-libLTLIBRARIES
# lib/progs/tests targets recurse into subdirectories. build-* targets
diff --git a/java/src/jni/h4jni.h b/java/src/jni/h4jni.h
new file mode 100644
index 0000000..51ede40
--- /dev/null
+++ b/java/src/jni/h4jni.h
@@ -0,0 +1,176 @@
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+
+/*
+ * For details of the HDF libraries, see the HDF Documentation at:
+ * http://hdfgroup.org/HDF/doc/
+ *
+ */
+
+#include <jni.h>
+
+#ifndef _Included_h4jni
+#define _Included_h4jni
+
+#ifdef __cplusplus
+ #define ENVPTR (env)
+ #define ENVPAR
+ #define ENVONLY
+#else /* __cplusplus */
+ #define ENVPTR (*env)
+ #define ENVPAR env,
+ #define ENVONLY env
+#endif /* __cplusplus */
+
+/* Macros for class access */
+/* Calling code must define ret_obj as jobject */
+#define CALL_CONSTRUCTOR(classname,classsig,args) { \
+ jclass cls; \
+ jmethodID constructor; \
+ cls = ENVPTR->FindClass(ENVPAR (classname)); \
+ if (cls == 0) { \
+ h4JNIFatalError(env, "JNI error: GetObjectClass\n"); \
+ ret_obj = NULL; \
+ } \
+ constructor = ENVPTR->GetMethodID(ENVPAR cls, "<init>", (classsig)); \
+ if (constructor == 0) { \
+ h4JNIFatalError(env, "JNI error: GetMethodID failed\n"); \
+ ret_obj = NULL; \
+ } \
+ ret_obj = ENVPTR->NewObjectA(ENVPAR cls, constructor, (args)); \
+}
+
+/* Macros for error check */
+/* for now: use top of exception stack: fix this to do whole stack */
+#define CALL_ERROR_CHECK() { \
+ int16 errval; \
+ jclass jc; \
+ errval = HEvalue((int32)1); \
+ if (errval != DFE_NONE) { \
+ h4buildException(env, errval); \
+ jc = ENVPTR->FindClass(ENVPAR "hdf/hdflib/HDFLibraryException"); \
+ if (jc != NULL) \
+ ENVPTR->ThrowNew(ENVPAR jc,HEstring((hdf_err_code_t)errval)); \
+ } \
+}
+
+
+/* Macros for string access */
+#define PIN_JAVA_STRING(javastr,localstr) { \
+ jboolean isCopy; \
+ if ((javastr) == NULL) { \
+ (localstr) = NULL; \
+ h4nullArgument(env, "java string is NULL"); \
+ } \
+ else { \
+ (localstr) = ENVPTR->GetStringUTFChars(ENVPAR (javastr), &isCopy); \
+ if ((localstr) == NULL) { \
+ h4JNIFatalError(env, "local c string is not pinned"); \
+ } \
+ } \
+}
+
+#define UNPIN_JAVA_STRING(javastr,localstr) { \
+ ENVPTR->ReleaseStringUTFChars(ENVPAR (javastr), (localstr)); \
+}
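+
+/* Typical wrapper shape built from these macros (a sketch only; "jstr" stands
+ * for a jstring argument and SomeHDFRoutine for an arbitrary HDF entry point,
+ * neither defined here). Pin the Java string, call the library, unpin, then
+ * map FAIL onto a Java exception with CALL_ERROR_CHECK():
+ *
+ *     const char *cstr;
+ *     intn        rval = FAIL;
+ *
+ *     PIN_JAVA_STRING(jstr, cstr);
+ *     if (cstr != NULL) {
+ *         rval = SomeHDFRoutine(cstr);
+ *         UNPIN_JAVA_STRING(jstr, cstr);
+ *         if (rval == FAIL)
+ *             CALL_ERROR_CHECK();
+ *     }
+ */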
+
+#define PIN_JAVA_STRING_TWO(javastr,localstr,java2str,local2str) { \
+ jboolean isCopy; \
+ if ((javastr) == NULL) { \
+ (localstr) = NULL; \
+ h4nullArgument(env, "java string is NULL"); \
+ } \
+ else if ((java2str) == NULL) { \
+ (local2str) = NULL; \
+ h4nullArgument(env, "second java string is NULL"); \
+ } \
+ else { \
+ (localstr) = ENVPTR->GetStringUTFChars(ENVPAR (javastr), &isCopy); \
+ if ((localstr) == NULL) { \
+ h4JNIFatalError(env, "local c string is not pinned"); \
+ } \
+ else { \
+ (local2str) = ENVPTR->GetStringUTFChars(ENVPAR (java2str), &isCopy); \
+ if ((local2str) == NULL) { \
+ ENVPTR->ReleaseStringUTFChars(ENVPAR (javastr), (localstr)); \
+ h4JNIFatalError(env, "second local c string is not pinned"); \
+ } \
+ } \
+ } \
+}
+
+#define UNPIN_JAVA_STRING_TWO(javastr,localstr,java2str,local2str) { \
+ ENVPTR->ReleaseStringUTFChars(ENVPAR (javastr), (localstr)); \
+ ENVPTR->ReleaseStringUTFChars(ENVPAR (java2str), (local2str)); \
+}
+
+#define PIN_JAVA_STRING_THREE(javastr,localstr,java2str,local2str,java3str,local3str) { \
+ jboolean isCopy; \
+ if ((javastr) == NULL) { \
+ (localstr) = NULL; \
+ h4nullArgument(env, "java string is NULL"); \
+ } \
+ else if ((java2str) == NULL) { \
+ (local2str) = NULL; \
+ h4nullArgument(env, "second java string is NULL"); \
+ } \
+ else if ((java3str) == NULL) { \
+ (local3str) = NULL; \
+ h4nullArgument(env, "third java string is NULL"); \
+ } \
+ else { \
+ (localstr) = ENVPTR->GetStringUTFChars(ENVPAR (javastr), &isCopy); \
+ if ((localstr) == NULL) { \
+ h4JNIFatalError(env, "local c string is not pinned"); \
+ } \
+ else { \
+ (local2str) = ENVPTR->GetStringUTFChars(ENVPAR (java2str), &isCopy); \
+ if ((local2str) == NULL) { \
+ ENVPTR->ReleaseStringUTFChars(ENVPAR (javastr), (localstr)); \
+ h4JNIFatalError(env, "second local c string is not pinned"); \
+ } \
+ else { \
+ (local3str) = ENVPTR->GetStringUTFChars(ENVPAR (java3str), &isCopy); \
+ if ((local3str) == NULL) { \
+ ENVPTR->ReleaseStringUTFChars(ENVPAR (javastr), (localstr)); \
+ ENVPTR->ReleaseStringUTFChars(ENVPAR (java2str), (local2str)); \
+ h4JNIFatalError(env, "third local c string is not pinned"); \
+ } \
+ } \
+ } \
+ } \
+}
+
+#define UNPIN_JAVA_STRING_THREE(javastr,localstr,java2str,local2str,java3str,local3str) { \
+ ENVPTR->ReleaseStringUTFChars(ENVPAR (javastr), (localstr)); \
+ ENVPTR->ReleaseStringUTFChars(ENVPAR (java2str), (local2str)); \
+ ENVPTR->ReleaseStringUTFChars(ENVPAR (java3str), (local3str)); \
+}
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+extern jboolean h4JNIFatalError(JNIEnv *, const char *);
+extern jboolean h4buildException(JNIEnv *, jint);
+extern jboolean h4badArgument (JNIEnv *, const char *);
+extern jboolean h4nullArgument(JNIEnv *, const char *);
+extern jboolean h4NotImplemented (JNIEnv *, const char *);
+extern jboolean h4outOfMemory (JNIEnv *, const char *);
+extern jboolean h4raiseException(JNIEnv *, const char *);
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif /* __cplusplus */
+
+#endif /* _Included_h4jni */
diff --git a/java/src/jni/hdfImp.c b/java/src/jni/hdfImp.c
new file mode 100644
index 0000000..a7e59a8
--- /dev/null
+++ b/java/src/jni/hdfImp.c
@@ -0,0 +1,430 @@
+
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+/*
+ * This code is the C-interface called by Java programs to access the
+ * HDF 4 library.
+ *
+ * Each routine wraps a single HDF entry point, generally with the
+ * analogous arguments and return codes.
+ *
+ */
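+
+/*
+ * Note on symbol names: JNI maps a native method to
+ * Java_<package>_<class>_<method>, escaping underscores in the Java method
+ * name as "_1" (hence HDdont_1atexit below for HDdont_atexit) and appending
+ * "__" plus a mangled argument signature for overloaded methods (see the two
+ * GRsetattr wrappers in hdfgrImp.c).
+ */
+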
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "hdf.h"
+#include "hfile.h"
+#include "jni.h"
+#include "h4jni.h"
+
+/*
+ * Class: hdf_hdflib_HDFLibrary
+ * Method: Hopen
+ * Signature: (Ljava/lang/String;II)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdflib_HDFLibrary_Hopen
+(JNIEnv *env, jclass clss, jstring hdfFile, jint access, jint ndds)
+{
+ const char *file;
+    int32 retVal = FAIL;
+    jclass jc;
+
+    PIN_JAVA_STRING(hdfFile, file);
+    if (file != NULL) {
+        /* open the HDF file named by hdfFile */
+ retVal = Hopen(file, (intn)access, (int16)ndds);
+
+ UNPIN_JAVA_STRING(hdfFile, file);
+
+ if (retVal == FAIL)
+ CALL_ERROR_CHECK();
+ }
+ return (jlong)retVal;
+}
+
+/*
+ * Class: hdf_hdflib_HDFLibrary
+ * Method: Hclose
+ * Signature: (J)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_Hclose
+(JNIEnv *env, jclass clss, jlong fid)
+{
+ intn status = 0;
+
+ if (fid < 0) {
+ /* maybe not an exception -- the file is already closed? */
+ return JNI_FALSE;
+ }
+ /* close the HDF file */
+ status = Hclose((int32)fid);
+ if (status == FAIL)
+ CALL_ERROR_CHECK();
+
+ return JNI_TRUE;
+}
+
+/*
+ * Class: hdf_hdflib_HDFLibrary
+ * Method: HDdont_atexit
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_HDdont_1atexit
+(JNIEnv *env, jclass clss)
+{
+ intn ret_value = SUCCEED;
+ ret_value = HDdont_atexit();
+ return (jint)ret_value;
+}
+
+/*
+ * Class: hdf_hdflib_HDFLibrary
+ * Method: Hishdf
+ * Signature: (Ljava/lang/String;)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_Hishdf
+(JNIEnv *env, jclass clss, jstring hdfFile)
+{
+
+ const char *hfile;
+ intn retVal;
+
+ PIN_JAVA_STRING(hdfFile, hfile);
+ if (hfile != NULL) {
+        /* check whether hdfFile names an HDF file */
+ retVal = Hishdf(hfile);
+
+ UNPIN_JAVA_STRING(hdfFile, hfile);
+
+ if (retVal == FALSE)
+ CALL_ERROR_CHECK();
+ }
+
+ return JNI_TRUE;
+}
+
+/*
+ * Class: hdf_hdflib_HDFLibrary
+ * Method: Hnumber
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_Hnumber
+(JNIEnv *env, jclass clss, jlong fid, jint tagtype)
+{
+ int32 retVal;
+
+ retVal = Hnumber((int32)fid, (uint16)tagtype);
+
+ if (retVal == FAIL)
+ CALL_ERROR_CHECK();
+
+ return (jint)retVal;
+}
+
+/*
+ * Class: hdf_hdflib_HDFLibrary
+ * Method: DFKNTsize
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_DFKNTsize
+(JNIEnv *env, jclass clss, jlong numbertype)
+{
+ int retVal;
+
+ retVal = DFKNTsize((int32)numbertype);
+
+ if (retVal == FAIL)
+ CALL_ERROR_CHECK();
+
+ return (jint)retVal;
+}
+
+/*
+ * Class: hdf_hdflib_HDFLibrary
+ * Method: Hcache
+ * Signature: (JI)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_Hcache
+(JNIEnv *env, jclass clss, jlong file_id, jint cache_switch)
+{
+
+ intn rval;
+ rval = Hcache((int32)file_id, (intn)cache_switch);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ return JNI_TRUE;
+}
+
+/*
+ * Class: hdf_hdflib_HDFLibrary
+ * Method: Hgetfileversion
+ * Signature: (J[I[Ljava/lang/String;)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_Hgetfileversion
+(JNIEnv *env, jclass clss, jlong file_id, jintArray vers, jobjectArray string)
+{
+ intn rval;
+ jclass Sjc;
+ char s[LIBVSTR_LEN+1];
+ jstring name;
+ jint *theArgs;
+ jboolean bb;
+ jobject o;
+
+ theArgs = ENVPTR->GetIntArrayElements(ENVPAR vers, &bb);
+
+ rval = Hgetfileversion((int32) file_id, (uint32 *)&(theArgs[0]),
+ (uint32 *)&(theArgs[1]), (uint32 *)&(theArgs[2]), s);
+ s[LIBVSTR_LEN] = '\0';
+
+ if (rval == FAIL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR vers, theArgs, JNI_ABORT);
+ CALL_ERROR_CHECK();
+ }
+ else {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR vers, theArgs, 0);
+ o = ENVPTR->GetObjectArrayElement(ENVPAR string, 0);
+ if (o == NULL) {
+ CALL_ERROR_CHECK();
+ }
+ else {
+ Sjc = ENVPTR->FindClass(ENVPAR "java/lang/String");
+ if (Sjc == NULL) {
+ CALL_ERROR_CHECK();
+ }
+ else if (ENVPTR->IsInstanceOf(ENVPAR o, Sjc) == JNI_FALSE) {
+ CALL_ERROR_CHECK();
+ }
+ else {
+ name = ENVPTR->NewStringUTF(ENVPAR s);
+ if (name != NULL) {
+ ENVPTR->SetObjectArrayElement(ENVPAR string, 0, (jobject)name);
+ }
+ }
+ ENVPTR->DeleteLocalRef(ENVPAR o);
+ }
+ }
+ return JNI_TRUE;
+}
+
+/*
+ * Class: hdf_hdflib_HDFLibrary
+ * Method: Hgetlibversion
+ * Signature: ([I[Ljava/lang/String;)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_Hgetlibversion
+(JNIEnv *env, jclass clss, jintArray vers, jobjectArray string)
+{
+ intn rval;
+ jclass Sjc;
+ char s[LIBVSTR_LEN+1] ;
+ jint *theArgs;
+ jstring name;
+ jobject o;
+ jboolean bb;
+
+ s[LIBVSTR_LEN] = '\0';
+ if (string == NULL) {
+ h4nullArgument(env, "Hgetlibversion: string is NULL");
+ } /* end if */
+ else if (vers == NULL) {
+ h4nullArgument(env, "Hgetlibversion: vers is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR vers) < 3) {
+ h4badArgument(env, "Hgetlibversion: vers input array < order 3");
+ } /* end else if */
+ else {
+ theArgs = ENVPTR->GetIntArrayElements(ENVPAR vers, &bb);
+ if (theArgs == NULL) {
+ h4JNIFatalError(env, "Hgetlibversion: vers not pinned");
+ } /* end if */
+ else {
+ rval = Hgetlibversion((uint32 *)&(theArgs[0]),
+ (uint32 *)&(theArgs[1]), (uint32 *)&(theArgs[2]), s);
+
+ if (rval == FAIL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR vers, theArgs, JNI_ABORT);
+ CALL_ERROR_CHECK();
+ }
+ else {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR vers, theArgs, 0);
+ Sjc = ENVPTR->FindClass(ENVPAR "java/lang/String");
+ if (Sjc == NULL) {
+ CALL_ERROR_CHECK();
+ }
+ else {
+ o = ENVPTR->GetObjectArrayElement(ENVPAR string, 0);
+ if (o == NULL) {
+ CALL_ERROR_CHECK();
+ }
+ else {
+ bb = ENVPTR->IsInstanceOf(ENVPAR o, Sjc);
+ if (bb == JNI_FALSE) {
+ CALL_ERROR_CHECK();
+ }
+ else {
+ ENVPTR->DeleteLocalRef(ENVPAR o);
+ name = ENVPTR->NewStringUTF(ENVPAR s);
+ if (name != NULL) {
+ ENVPTR->SetObjectArrayElement(ENVPAR string, 0, (jobject)name);
+ }
+ }
+ }
+ }
+ }
+ } /* end else */
+ } /* end else */
+ return JNI_TRUE;
+}
+
+
+/*
+ * Class: hdf_hdflib_HDFLibrary
+ * Method: Hsetaccesstype
+ * Signature: (JI)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_Hsetaccesstype
+(JNIEnv *env, jclass clss, jlong h_id, jint access_type)
+{
+ intn rval;
+
+ rval = Hsetaccesstype((int32)h_id, (uintn)access_type);
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ return JNI_TRUE;
+}
+
+/*
+ * Class: hdf_hdflib_HDFLibrary
+ * Method: Hsync
+ * Signature: (J)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_Hsync
+(JNIEnv *env, jclass clss, jlong file_id)
+{
+ intn rval;
+
+ rval = Hsync((int32)file_id);
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ return JNI_TRUE;
+}
+
+/*
+ * Class: hdf_hdflib_HDFLibrary
+ * Method: HDFclose
+ * Signature: (J)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_HDFclose
+(JNIEnv *env, jclass clss, jlong file_id)
+{
+ intn rval;
+
+ rval = Hclose((int32)file_id);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ return JNI_TRUE;
+}
+
+
+/*
+ * Class: hdf_hdflib_HDFLibrary
+ * Method: HDFopen
+ * Signature: (Ljava/lang/String;IS)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_HDFopen
+(JNIEnv *env, jclass clss, jstring filename, jint access, jshort n_dds)
+{
+    int32 rval = FAIL;
+ const char *str;
+
+ PIN_JAVA_STRING(filename, str);
+ if (str != NULL) {
+ rval = HDFopen((char *)str, (intn)access, (int16)n_dds);
+
+ UNPIN_JAVA_STRING(filename, str);
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+ }
+ return rval;
+}
+
+
+#ifdef not_yet_implemented
+/*
+ * Class: hdf_hdflib_HDFLibrary
+ * Method: HDFflusdd
+ * Signature: (J)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_HDFflusdd
+(JNIEnv *env, jclass clss, jlong file_id)
+{
+ intn rval;
+
+ rval = Hflushdd((int32)file_id);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ return JNI_TRUE;
+}
+#endif
+
+/*
+ * Class: hdf_hdflib_HDFLibrary
+ * Method: HDgetNTdesc
+ * Signature: (I)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL
+Java_hdf_hdflib_HDFLibrary_HDgetNTdesc
+(JNIEnv *env, jclass clss, jint nt)
+{
+ char *rval;
+ jstring rstring;
+
+ rval = HDgetNTdesc((int32)nt);
+
+ if (rval != NULL) {
+ rstring = ENVPTR->NewStringUTF(ENVPAR rval);
+ HDfree(rval);
+ }
+ else
+ rstring = NULL;
+
+ return rstring;
+}
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/java/src/jni/hdfanImp.c b/java/src/jni/hdfanImp.c
new file mode 100644
index 0000000..968b720
--- /dev/null
+++ b/java/src/jni/hdfanImp.c
@@ -0,0 +1,393 @@
+
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+/*
+ * This code is the C-interface called by Java programs to access the
+ * HDF 4.2 library.
+ *
+ * Each routine wraps a single HDF entry point, generally with the
+ * analogous arguments and return codes.
+ *
+ */
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "hdf.h"
+#include "jni.h"
+#include "h4jni.h"
+
+JNIEXPORT jlong JNICALL
+Java_hdf_hdflib_HDFLibrary_ANstart
+(JNIEnv *env, jclass clss, jlong file_id)
+{
+ int32 retVal = -1;
+
+ retVal = ANstart((int32)file_id);
+ if (retVal < 0)
+ CALL_ERROR_CHECK();
+
+ return (jlong)retVal;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_ANend
+(JNIEnv *env, jclass clss, jlong anid)
+{
+ int32 retVal;
+
+ retVal = ANend((int32)anid);
+
+ if (retVal == FAIL)
+ CALL_ERROR_CHECK();
+
+ return JNI_TRUE;
+}
+
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_ANfileinfo
+(JNIEnv *env, jclass clss, jlong anid, jintArray info)
+{
+ intn retVal;
+ jint *theArgs;
+ jboolean isCopy;
+
+ if (info == NULL) {
+ h4nullArgument(env, "ANfileinfo: info is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR info) < 4) {
+ h4badArgument(env, "ANfileinfo: info input array < order 4");
+ } /* end else if */
+ else {
+ theArgs = ENVPTR->GetIntArrayElements(ENVPAR info, &isCopy);
+ if (theArgs == NULL) {
+ h4JNIFatalError(env, "ANfileinfo: info not pinned");
+ } /* end if */
+ else {
+ retVal = ANfileinfo((int32)anid, (int32 *)&(theArgs[0]),
+ (int32 *)&(theArgs[1]), (int32 *)&(theArgs[2]),
+ (int32 *)&(theArgs[3]));
+
+ if (retVal == FAIL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR info, theArgs, JNI_ABORT);
+ CALL_ERROR_CHECK();
+ } /* end if */
+ else {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR info, theArgs, 0);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ return JNI_TRUE;
+}
+
+
+JNIEXPORT jlong JNICALL
+Java_hdf_hdflib_HDFLibrary_ANselect
+(JNIEnv *env, jclass clss, jlong anid, jint index, jint anntype)
+{
+ int32 retVal;
+
+ retVal = ANselect((int32)anid, (int32)index, (ann_type)anntype);
+ if (retVal < 0)
+ CALL_ERROR_CHECK();
+
+ return (jlong)retVal;
+}
+
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_ANendaccess
+(JNIEnv *env, jclass clss, jlong ann_id)
+{
+ intn retVal;
+
+ retVal = ANendaccess((int32)ann_id);
+ if (retVal == FAIL)
+ CALL_ERROR_CHECK();
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_ANnumann
+(JNIEnv *env, jclass clss, jlong an_id, jint anntype, jshort tag, jshort ref)
+{
+ int32 retVal;
+
+ retVal = ANnumann((int32)an_id, (ann_type)anntype, (uint16)tag, (uint16)ref);
+ if (retVal < 0)
+ CALL_ERROR_CHECK();
+
+ return (jint)retVal;
+}
+
+JNIEXPORT jshort JNICALL
+Java_hdf_hdflib_HDFLibrary_ANatype2tag
+(JNIEnv *env, jclass clss, jint antype)
+{
+ uint16 retVal;
+
+ retVal = ANatype2tag((ann_type)antype);
+ if (retVal < 0)
+ CALL_ERROR_CHECK();
+
+ return (jshort)retVal;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_ANtag2atype
+(JNIEnv *env, jclass clss, jint antag)
+{
+ int32 retVal;
+
+ retVal = ANtag2atype((uint16)antag);
+ if (retVal < 0)
+ CALL_ERROR_CHECK();
+
+ return (jint)retVal;
+}
+
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_ANannlist
+(JNIEnv *env, jclass clss, jlong an_id, jint anntype, jshort tag, jshort ref, jintArray annlist)
+{
+    intn retVal = FAIL;
+ jint *iarr;
+ jboolean isCopy;
+
+ if (annlist == NULL) {
+ h4nullArgument(env, "ANannlist: annlist is NULL");
+ } /* end if */
+ else {
+ iarr = ENVPTR->GetIntArrayElements(ENVPAR annlist, &isCopy);
+ if (iarr == NULL) {
+ h4JNIFatalError(env, "ANannlist: annlist not pinned");
+ } /* end if */
+ else {
+ retVal = ANannlist((int32)an_id, (ann_type)anntype, (uint16)tag,(uint16)ref,(int32 *)iarr);
+
+ if (retVal == FAIL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR annlist, iarr, JNI_ABORT);
+ CALL_ERROR_CHECK();
+ } /* end if */
+ else {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR annlist, iarr, 0);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ return (jint)retVal;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_ANannlen
+(JNIEnv *env, jclass clss, jlong ann_id)
+{
+ int32 retVal;
+
+ retVal = ANannlen((int32)ann_id);
+ if (retVal < 0)
+ CALL_ERROR_CHECK();
+
+ return (jint)retVal;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_ANreadann
+(JNIEnv *env, jclass clss, jlong ann_id, jobjectArray annbuf, jint maxlen)
+{
+ int32 retVal;
+ char *data;
+ jclass Sjc;
+ jstring rstring;
+ jobject o;
+ jboolean bb;
+
+ data = (char *)HDmalloc((maxlen+1)*sizeof(char));
+ if (data == NULL) {
+ /* Exception */
+ h4outOfMemory(env, "ANreadan");
+ }
+ else {
+ /* read annotation from HDF */
+ retVal = ANreadann((int32)ann_id, data, (int32)maxlen);
+
+ if (retVal == FAIL) {
+ CALL_ERROR_CHECK();
+ }
+ else {
+ o = ENVPTR->GetObjectArrayElement(ENVPAR annbuf, 0);
+ if (o == NULL) {
+ CALL_ERROR_CHECK();
+ }
+ else {
+ Sjc = ENVPTR->FindClass(ENVPAR "java/lang/String");
+ if (Sjc == NULL) {
+ CALL_ERROR_CHECK();
+ }
+ else if (ENVPTR->IsInstanceOf(ENVPAR o, Sjc) == JNI_FALSE) {
+ CALL_ERROR_CHECK();
+ }
+ else {
+ data[maxlen] = '\0';
+ rstring = ENVPTR->NewStringUTF(ENVPAR data);
+ if (rstring != NULL)
+ ENVPTR->SetObjectArrayElement(ENVPAR annbuf, 0, (jobject)rstring);
+ }
+ ENVPTR->DeleteLocalRef(ENVPAR o);
+ }
+ }
+ HDfree((char *)data);
+ } /* end else */
+ return JNI_TRUE;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_ANcreate
+(JNIEnv *env, jclass clss, jlong an_id, jshort tag, jshort ref, jint type)
+{
+ int32 retVal;
+
+ retVal = ANcreate((int32) an_id, (uint16) tag, (uint16) ref, (ann_type) type);
+ if (retVal < 0)
+ CALL_ERROR_CHECK();
+
+ return (jint)retVal;
+}
+
+JNIEXPORT jlong JNICALL
+Java_hdf_hdflib_HDFLibrary_ANcreatef
+(JNIEnv *env, jclass clss, jlong an_id, jint type)
+{
+ int32 retVal;
+
+ retVal = ANcreatef((int32) an_id, (ann_type) type);
+ if (retVal < 0)
+ CALL_ERROR_CHECK();
+
+ return (jlong)retVal;
+}
+
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_ANget_1tagref
+(JNIEnv *env, jclass clss, jlong an_id, jint index, jint type, jshortArray tagref)
+{
+    int32 rval = FAIL;
+ short *theArgs;
+ jboolean isCopy;
+
+ if (tagref == NULL) {
+ h4nullArgument(env, "ANget_tagref: tagref is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR tagref) < 2) {
+ h4badArgument(env, "ANget_tagref: tagref input array < order 2");
+ } /* end else if */
+ else {
+ theArgs = ENVPTR->GetShortArrayElements(ENVPAR tagref, &isCopy);
+ if (theArgs == NULL) {
+ h4JNIFatalError(env, "ANget_tagref: tagref not pinned");
+ } /* end if */
+ else {
+ rval = ANget_tagref((int32)an_id, (int32)index, (ann_type)type, (uint16 *)&(theArgs[0]), (uint16 *)&(theArgs[1]));
+
+ if (rval == FAIL) {
+ ENVPTR->ReleaseShortArrayElements(ENVPAR tagref, theArgs, JNI_ABORT);
+ CALL_ERROR_CHECK();
+ } /* end if */
+ else {
+ ENVPTR->ReleaseShortArrayElements(ENVPAR tagref, theArgs, 0);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return rval;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_ANid2tagref
+(JNIEnv *env, jclass clss, jlong an_id, jshortArray tagref)
+{
+ int32 rval;
+ short *theArgs;
+ jboolean isCopy;
+
+ if (tagref == NULL) {
+ h4nullArgument(env, "ANid2tagref: tagref is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR tagref) < 2) {
+ h4badArgument(env, "ANid2tagref: tagref input array < order 2");
+ } /* end else if */
+ else {
+ theArgs = ENVPTR->GetShortArrayElements(ENVPAR tagref, &isCopy);
+ if (theArgs == NULL) {
+ h4JNIFatalError(env, "ANid2tagref: tagref not pinned");
+ } /* end if */
+ else {
+ rval = ANid2tagref((int32)an_id, (uint16 *)&(theArgs[0]), (uint16 *)&(theArgs[1]));
+
+ if (rval == FAIL) {
+ ENVPTR->ReleaseShortArrayElements(ENVPAR tagref, theArgs, JNI_ABORT);
+ CALL_ERROR_CHECK();
+ }
+ else {
+ ENVPTR->ReleaseShortArrayElements(ENVPAR tagref, theArgs, 0);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ return JNI_TRUE;
+}
+
+JNIEXPORT jlong JNICALL
+Java_hdf_hdflib_HDFLibrary_ANtagref2id
+(JNIEnv *env, jclass clss, jlong an_id, jshort tag, jshort ref)
+{
+ int32 retVal;
+
+ retVal = ANtagref2id((int32)an_id, (uint16)tag, (uint16)ref);
+ if (retVal < 0)
+ CALL_ERROR_CHECK();
+
+ return (jlong)retVal;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_ANwriteann
+(JNIEnv *env, jclass clss, jlong ann_id, jstring label, jint ann_length)
+{
+
+ intn rval;
+ const char *str;
+
+ PIN_JAVA_STRING(label, str);
+
+ if (str != NULL) {
+ /* should check that str is as long as ann_length.... */
+
+ rval = ANwriteann((int32)ann_id, str, (int32)ann_length);
+
+ UNPIN_JAVA_STRING(label, str);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+ }
+
+ return JNI_TRUE;
+}
+
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/java/src/jni/hdfdfpalImp.c b/java/src/jni/hdfdfpalImp.c
new file mode 100644
index 0000000..a4b6895
--- /dev/null
+++ b/java/src/jni/hdfdfpalImp.c
@@ -0,0 +1,221 @@
+
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+/*
+ * This code is the C-interface called by Java programs to access the
+ * HDF 4.2 library.
+ *
+ * Each routine wraps a single HDF entry point, generally with the
+ * analogous arguments and return codes.
+ *
+ */
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+#include "hdf.h"
+#include "h4jni.h"
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_DFPaddpal
+(JNIEnv *env, jclass clss, jstring filename, jbyteArray palette)
+{
+ intn rval;
+ const char *f;
+ jbyte *dat;
+ jboolean bb;
+
+ rval = FAIL;
+
+ if (palette == NULL) {
+ h4nullArgument(env, "DFPaddpal: palette is NULL");
+ } /* end if */
+ else {
+ PIN_JAVA_STRING(filename, f);
+ if (f != NULL) {
+ dat = ENVPTR->GetByteArrayElements(ENVPAR palette, &bb);
+ if (dat == NULL) {
+ h4JNIFatalError(env, "DFPaddpal: palette not pinned");
+ } /* end if */
+ else {
+ rval = DFPaddpal(f, (VOIDP)dat);
+ ENVPTR->ReleaseByteArrayElements(ENVPAR palette, dat, JNI_ABORT);
+ } /* end else */
+ UNPIN_JAVA_STRING(filename, f);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+ }
+ } /* end else */
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_DFPgetpal
+(JNIEnv *env, jclass clss, jstring filename, jbyteArray palette)
+{
+ intn rval;
+ const char *f;
+ jbyte *dat;
+ jboolean bb;
+ int copyMode = JNI_ABORT;
+
+ if (palette == NULL) {
+ h4nullArgument(env, "DFPgetpal: palette is NULL");
+ } /* end if */
+ else {
+ dat = ENVPTR->GetByteArrayElements(ENVPAR palette, &bb);
+
+ if (dat == NULL) {
+ h4JNIFatalError(env, "DFPgetpal: palette not pinned");
+ } /* end if */
+ else {
+ PIN_JAVA_STRING(filename, f);
+ if (f != NULL) {
+ rval = DFPgetpal(f, (VOIDP)dat);
+
+ UNPIN_JAVA_STRING(filename, f);
+
+ if (rval == FAIL) {
+ CALL_ERROR_CHECK();
+ } /* end if */
+ else {
+ copyMode = 0;
+ } /* end else */
+ } /* end if */
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR palette, dat, copyMode);
+ } /* end else */
+ } /* end else */
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jshort JNICALL
+Java_hdf_hdflib_HDFLibrary_DFPlastref
+(JNIEnv *env, jclass clss)
+{
+ return (DFPlastref());
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_DFPnpals
+(JNIEnv *env, jclass clss, jstring filename)
+{
+    intn rval = FAIL;
+ const char *f;
+
+ PIN_JAVA_STRING(filename, f);
+ if (f != NULL) {
+ rval = DFPnpals(f);
+
+ UNPIN_JAVA_STRING(filename, f);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+ }
+
+ return rval;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_DFPputpal
+(JNIEnv *env, jclass clss, jstring filename, jbyteArray palette, jint overwrite, jstring filemode)
+{
+ intn rval;
+ const char *f;
+ const char *m;
+ jbyte *dat;
+ jboolean bb;
+
+ rval = FAIL;
+
+ if (palette == NULL) {
+ h4nullArgument(env, "DFPputpal: palette is NULL");
+ } /* end if */
+ else {
+ PIN_JAVA_STRING_TWO(filename, f, filemode, m);
+ if (f != NULL && m != NULL) {
+            /* pin the palette bytes, then verify the pin succeeded */
+            dat = ENVPTR->GetByteArrayElements(ENVPAR palette, &bb);
+            if (dat == NULL) {
+                h4JNIFatalError(env, "DFPputpal: palette not pinned");
+            } /* end if */
+            else {
+                rval = DFPputpal (f, (VOIDP)dat, (intn)overwrite, m);
+
+                ENVPTR->ReleaseByteArrayElements(ENVPAR palette, dat, JNI_ABORT);
+            } /* end else */
+
+ UNPIN_JAVA_STRING_TWO(filename, f, filemode, m);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+ }
+ } /* end else */
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_DFPreadref
+(JNIEnv *env, jclass clss, jstring filename, jshort ref)
+{
+ intn rval;
+ const char *f;
+
+ PIN_JAVA_STRING(filename, f);
+ if (f != NULL) {
+ rval = DFPreadref((char *)f, (uint16) ref);
+
+ UNPIN_JAVA_STRING(filename, f);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+ }
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jshort JNICALL
+Java_hdf_hdflib_HDFLibrary_DFPrestart
+(JNIEnv *env, jclass clss)
+{
+ return (DFPrestart());
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_DFPwriteref
+(JNIEnv *env, jclass clss, jstring filename, jshort ref)
+{
+ intn rval;
+ const char *f;
+
+ PIN_JAVA_STRING(filename, f);
+ if (f != NULL) {
+ rval = DFPwriteref((char *)f, (uint16) ref);
+
+ UNPIN_JAVA_STRING(filename, f);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+ }
+
+ return JNI_TRUE;
+}
+
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/java/src/jni/hdfdfuImp.c b/java/src/jni/hdfdfuImp.c
new file mode 100644
index 0000000..9e55441
--- /dev/null
+++ b/java/src/jni/hdfdfuImp.c
@@ -0,0 +1,44 @@
+
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+/*
+ * This code is the C-interface called by Java programs to access the
+ * HDF 4.2 library.
+ *
+ * Each routine wraps a single HDF entry point, generally with the
+ * analogous arguments and return codes.
+ *
+ */
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+#include "hdf.h"
+#include "jni.h"
+#include "h4jni.h"
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_DFUfptoimage
+(JNIEnv *env, jclass clss, jint hdim, jint vdim,
+ jfloat max, jfloat min, jfloatArray hscale, jfloatArray vscale, jfloatArray data,
+ jbyteArray palette, jstring outfile, jint ct_method, jint hres, jint vres, jint compress)
+{
+ h4NotImplemented(env, "DFUfptoimage (windows)");
+ return JNI_TRUE;
+}
+
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/java/src/jni/hdfexceptionImp.c b/java/src/jni/hdfexceptionImp.c
new file mode 100644
index 0000000..9491a47
--- /dev/null
+++ b/java/src/jni/hdfexceptionImp.c
@@ -0,0 +1,186 @@
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+/*
+ * This is a utility program used by the HDF Java-C wrapper layer to
+ * generate exceptions. This may be called from any part of the
+ * Java-C interface.
+ *
+ */
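+
+/*
+ * Each helper below returns JNI_FALSE if the required exception class or
+ * constructor cannot be resolved, and JNI_TRUE otherwise. A wrapper
+ * typically reports the problem and lets the pending exception surface
+ * when the native method returns to Java, e.g. (illustrative names only):
+ *
+ *     if (filename == NULL) {
+ *         h4nullArgument(env, "SomeWrapper: filename is NULL");
+ *         return JNI_FALSE;
+ *     }
+ */
+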
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "hdf.h"
+#include "h4jni.h"
+#include <stdlib.h>
+
+/********************/
+/* Local Macros */
+/********************/
+
+#define THROWEXCEPTION(className,args) { \
+ jclass jc; \
+ jmethodID jm; \
+ jobject ex; \
+ jc = ENVPTR->FindClass(ENVPAR (className)); \
+ if (jc == NULL) { \
+ return JNI_FALSE; \
+ } \
+ jm = ENVPTR->GetMethodID(ENVPAR jc, "<init>", "(Ljava/lang/String;)V"); \
+ if (jm == NULL) { \
+ return JNI_FALSE; \
+ } \
+ ex = ENVPTR->NewObjectA (ENVPAR jc, jm, (jvalue*)(args)); \
+ if (ENVPTR->Throw(ENVPAR (jthrowable)ex) < 0) { \
+ printf("FATAL ERROR: %s: Throw failed\n", (className)); \
+ return JNI_FALSE; \
+ } \
+ return JNI_TRUE; \
+}
+
+jboolean
+h4buildException
+(JNIEnv *env, jint HDFerr)
+{
+    jmethodID jm;
+    jclass jc;
+    int args[2];
+    jobject ex;
+    int rval;
+
+
+ jc = ENVPTR->FindClass(ENVPAR "hdf/hdflib/HDFLibraryException");
+ if (jc == NULL) {
+ return JNI_FALSE;
+ }
+ jm = ENVPTR->GetMethodID(ENVPAR jc, "<init>", "(I)V");
+ if (jm == NULL) {
+ return JNI_FALSE;
+ }
+ args[0] = HDFerr;
+ args[1] = 0;
+
+ ex = ENVPTR->NewObjectA (ENVPAR jc, jm, (jvalue *)args );
+
+ rval = ENVPTR->Throw(ENVPAR (jthrowable)ex );
+
+ return JNI_TRUE;
+}
+
+/*
+ * Routine to raise particular Java exceptions from C
+ */
+static
+jboolean
+H4JNIErrorClass
+(JNIEnv *env, const char *message, const char *className)
+{
+ char *args[2];
+ jstring str = ENVPTR->NewStringUTF(ENVPAR message);
+ args[0] = (char *)str;
+ args[1] = 0;
+
+ THROWEXCEPTION(className, args);
+} /* end H4JNIErrorClass() */
+
+/*
+ * A bad argument in an HDF call
+ * Create and throw an 'IllegalArgumentException'
+ *
+ * Note: the exception is recorded as pending here; it is
+ * raised on the Java side when the native method returns.
+ */
+jboolean
+h4badArgument
+ (JNIEnv *env, const char *functName)
+{
+ return H4JNIErrorClass(env, functName, "java/lang/IllegalArgumentException");
+} /* end h4badArgument() */
+
+/*
+ * A NULL argument in an HDF call
+ * Create and throw a 'NullPointerException'
+ *
+ * Note: the exception is recorded as pending here; it is
+ * raised on the Java side when the native method returns.
+ */
+jboolean
+h4nullArgument
+ (JNIEnv *env, const char *functName)
+{
+ return H4JNIErrorClass(env, functName, "java/lang/NullPointerException");
+} /* end h4nullArgument() */
+
+jboolean
+h4NotImplemented
+(JNIEnv *env, const char *functName)
+{
+ return H4JNIErrorClass(env, functName, "hdf/hdflib/HDFNotImplementedException");
+}
+
+jboolean
+h4outOfMemory
+(JNIEnv *env, const char *functName)
+{
+ return H4JNIErrorClass(env, functName, "java/lang/OutOfMemoryError");
+}
+
+/*
+ * A fatal error in a JNI call
+ */
+jboolean
+h4JNIFatalError
+(JNIEnv *env, const char *functName)
+{
+ return H4JNIErrorClass(env, functName, "java/lang/InternalError");
+}
+
+jboolean
+h4raiseException
+(JNIEnv *env, const char *message)
+{
+ return H4JNIErrorClass(env, message, "hdf/hdflib/HDFLibraryException");
+}
+/*
+ * Class: hdf_hdflib_HDFLibraryException
+ * Method: printStackTrace0
+ * Signature: (Ljava/lang/Object;)V
+ *
+ * Call the HDF library to print the HDF error stack to 'file_name'.
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdflib_HDFLibraryException_printStackTrace0
+(JNIEnv *env, jclass clss, jstring file_name)
+{
+ FILE *stream = NULL;
+ const char *file = NULL;
+
+ if(file_name == NULL) {
+ HEprint(stderr, 0);
+ } /* end if */
+ else {
+ file = ENVPTR->GetStringUTFChars(ENVPAR file_name, 0);
+ stream = fopen(file, "a+");
+ if(stream) {
+ HEprint(stream, 0);
+ fclose(stream);
+ } /* end if */
+ ENVPTR->ReleaseStringUTFChars(ENVPAR file_name, file);
+ } /* end else */
+} /* end Java_hdf_hdflib_HDFLibraryException_printStackTrace0() */
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/java/src/jni/hdfgrImp.c b/java/src/jni/hdfgrImp.c
new file mode 100644
index 0000000..0c8ae3f
--- /dev/null
+++ b/java/src/jni/hdfgrImp.c
@@ -0,0 +1,1044 @@
+
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+/*
+ * This code is the C-interface called by Java programs to access the
+ * HDF 4 library.
+ *
+ * Each routine wraps a single HDF entry point, generally with the
+ * analogous arguments and return codes.
+ *
+ */
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+#include "hdf.h"
+#include "mfhdf.h"
+#include "jni.h"
+#include "h4jni.h"
+
+/* Name changed from MAX_GR_NAME to H4_MAX_GR_NAME in hdf4.2r2 */
+#define MAX_GR_NAME H4_MAX_GR_NAME
+
+extern jboolean makeChunkInfo(JNIEnv *env, jobject chunkobj, int32 flgs, HDF_CHUNK_DEF *cinf);
+extern jboolean getNewCompInfo(JNIEnv *env, jobject ciobj, comp_info *cinf);
+extern jboolean setNewCompInfo(JNIEnv *env, jobject ciobj, comp_coder_t coder, comp_info *cinf);
+extern jboolean getChunkInfo(JNIEnv *env, jobject chunkobj, HDF_CHUNK_DEF *cinf);
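+
+/* These helpers convert between the Java-side chunk/compression info objects
+ * and the corresponding C structs: getChunkInfo and getNewCompInfo read a
+ * Java object into HDF_CHUNK_DEF/comp_info before a "set" call, while
+ * makeChunkInfo and setNewCompInfo fill the Java object after a "get" call.
+ * They are declared extern here and implemented in a companion source file
+ * of this JNI layer. */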
+
+
+
+JNIEXPORT jlong JNICALL
+Java_hdf_hdflib_HDFLibrary_GRstart
+(JNIEnv *env, jclass cls, jlong file_id)
+{
+ intn rval;
+
+ rval = GRstart((int32)file_id);
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ return (jlong)rval;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_GRend
+(JNIEnv *env, jclass cls, jlong gr_id)
+{
+ intn rval;
+
+ rval = GRend((int32) gr_id);
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_GRendaccess
+(JNIEnv *env, jclass cls, jlong gr_id)
+{
+ intn rval;
+
+ rval = GRendaccess((int32) gr_id);
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_GRfileinfo
+(JNIEnv *env, jclass cls, jlong gr_id, jintArray argv)
+{
+ intn rval;
+ jint *theArgs;
+ jboolean isCopy; /* dummy */
+
+ if (argv == NULL) {
+ h4nullArgument(env, "GRfileinfo: argv is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR argv) < 2) {
+ h4badArgument(env, "GRfileinfo: argv input array < order 2");
+ } /* end else if */
+ else {
+ theArgs = ENVPTR->GetIntArrayElements(ENVPAR argv, &isCopy);
+ if (theArgs == NULL) {
+ h4JNIFatalError(env, "GRfileinfo: argv not pinned");
+ } /* end if */
+ else {
+ rval = GRfileinfo((int32)gr_id, (int32 *)&(theArgs[0]), (int32 *)&(theArgs[1]));
+
+ if (rval == FAIL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR argv, theArgs, JNI_ABORT);
+ CALL_ERROR_CHECK();
+ }
+ else {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR argv, theArgs, 0);
+ }
+ } /* end else */
+ } /* end else */
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jlong JNICALL
+Java_hdf_hdflib_HDFLibrary_GRselect
+(JNIEnv *env, jclass cls, jlong gr_id, jint index)
+{
+ int32 rval;
+
+ rval = GRselect((int32)gr_id, (int32) index);
+ if (rval < 0)
+ CALL_ERROR_CHECK();
+
+ return (jlong)rval;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_GRnametoindex
+(JNIEnv *env, jclass cls, jlong gr_id, jstring gr_name)
+{
+    int32 rval = FAIL;
+ const char *str;
+
+ PIN_JAVA_STRING(gr_name, str);
+ if (str != NULL) {
+ rval = GRnametoindex((int32)gr_id, str);
+
+ UNPIN_JAVA_STRING(gr_name, str);
+
+ if (rval < 0)
+ CALL_ERROR_CHECK();
+ }
+
+ return (jint)rval;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_GRgetchunkinfo
+(JNIEnv *env, jclass cls, jlong grsid, jobject chunk_def, jintArray cflags)
+{
+ int32 rval;
+ HDF_CHUNK_DEF cdef;
+    jboolean stat = JNI_TRUE;
+ jint *flgs;
+ jboolean isCopy;
+
+ if (cflags == NULL) {
+ h4nullArgument(env, "GRgetchunkinfo: cflags is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR cflags) < 1) {
+ h4badArgument(env, "GRgetchunkinfo: cflags input array < order 1");
+ } /* end else if */
+ else {
+ flgs = ENVPTR->GetIntArrayElements(ENVPAR cflags, &isCopy);
+ if (flgs == NULL) {
+ h4JNIFatalError(env, "GRgetchunkinfo: cflags not pinned");
+ } /* end if */
+ else {
+ rval = GRgetchunkinfo( (int32)grsid, &cdef, (int32 *)&(flgs[0]));
+
+ if (rval == FAIL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR cflags, (jint *)flgs, JNI_ABORT);
+ CALL_ERROR_CHECK();
+ }
+ else {
+ stat = JNI_TRUE;
+ if (*flgs != 0) {
+ /* convert cdef to HDFchunkinfo */
+ stat = makeChunkInfo(env, chunk_def, *flgs, &cdef);
+ }
+ ENVPTR->ReleaseIntArrayElements(ENVPAR cflags, (jint *)flgs, 0);
+ }
+ } /* end else */
+ } /* end else */
+
+ return stat/*JNI_TRUE*/;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_GRgetiminfo
+(JNIEnv *env, jclass cls, jlong ri_id, jobjectArray gr_name, jintArray argv, jintArray dim_sizes)
+{
+ intn rval;
+ jint *dims;
+ jint *theArgs;
+ jclass Sjc;
+ char *str;
+ jstring rstring;
+ jboolean isCopy;
+ jboolean bb = 0;
+ jboolean abb = 0;
+ jobject o;
+
+ str = (char *)HDmalloc(MAX_GR_NAME+1);
+ if (str == NULL) {
+ h4outOfMemory(env, "GRgetiminfo");
+ }
+ else {
+ if (gr_name == NULL) {
+ h4nullArgument(env, "GRgetiminfo: gr_name is NULL");
+ } /* end if */
+ else if (dim_sizes == NULL) {
+ h4nullArgument(env, "GRgetiminfo: dim_sizes is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR dim_sizes) < 2) {
+ h4badArgument(env, "GRgetiminfo: dim_sizes input array < order 2");
+ } /* end else if */
+ else if (argv == NULL) {
+ h4nullArgument(env, "GRgetiminfo: argv is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR argv) < 4) {
+ h4badArgument(env, "GRgetiminfo: argv input array < order 4");
+ } /* end else if */
+ else {
+ dims = ENVPTR->GetIntArrayElements(ENVPAR dim_sizes, &isCopy);
+ if (dims == NULL) {
+ h4JNIFatalError(env, "GRgetiminfo: dim_sizes not pinned");
+ } /* end if */
+ else {
+ theArgs = ENVPTR->GetIntArrayElements(ENVPAR argv, &isCopy);
+ if (theArgs == NULL) {
+ h4JNIFatalError(env, "GRgetiminfo: argv not pinned");
+ } /* end if */
+ else {
+ rval = GRgetiminfo((int32) ri_id, (char *)str, (int32 *)&(theArgs[0]),
+ (int32 *)&(theArgs[1]), (int32 *)&(theArgs[2]), (int32 *)dims,
+ (int32 *)&(theArgs[3]));
+
+ if (rval == FAIL) {
+ abb = JNI_ABORT;
+ bb = JNI_ABORT;
+ CALL_ERROR_CHECK();
+ }
+ else {
+ o = ENVPTR->GetObjectArrayElement(ENVPAR gr_name, 0);
+ if (o == NULL) {
+ bb = JNI_ABORT;
+ CALL_ERROR_CHECK();
+ }
+ else {
+ Sjc = ENVPTR->FindClass(ENVPAR "java/lang/String");
+ if (Sjc == NULL) {
+ bb = JNI_ABORT;
+ CALL_ERROR_CHECK();
+ }
+ else if (ENVPTR->IsInstanceOf(ENVPAR o, Sjc) == JNI_FALSE) {
+ bb = JNI_ABORT;
+ CALL_ERROR_CHECK();
+ }
+ else {
+ str[MAX_GR_NAME] = '\0';
+ rstring = ENVPTR->NewStringUTF(ENVPAR str);
+ if (rstring != NULL)
+ ENVPTR->SetObjectArrayElement(ENVPAR gr_name, 0, (jobject)rstring);
+ }
+ ENVPTR->DeleteLocalRef(ENVPAR o);
+ }
+ }
+ ENVPTR->ReleaseIntArrayElements(ENVPAR argv, theArgs, abb);
+
+ } /* end else */
+ ENVPTR->ReleaseIntArrayElements(ENVPAR dim_sizes, dims, bb);
+
+ } /* end else */
+ } /* end else */
+
+ HDfree(str);
+ }
+ return JNI_TRUE;
+}
+
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_GRreadimage
+(JNIEnv *env, jclass cls, jlong ri_id, jintArray start, jintArray stride, jintArray edge, jbyteArray data)
+{
+ intn rval;
+ jbyte *arr;
+ jint *strt;
+ jint *strd;
+ jint *edg;
+ jboolean bb;
+ jboolean cbb = 0;
+
+ if (data == NULL) {
+ h4nullArgument(env, "GRreadimage: data is NULL");
+ } /* end if */
+ else if (start == NULL) {
+ h4nullArgument(env, "GRreadimage: start is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR start) < 2) {
+ h4badArgument(env, "GRreadimage: start input array < order 2");
+ } /* end else if */
+ else if (edge == NULL) {
+ h4nullArgument(env, "GRreadimage: edge is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR edge) < 2) {
+ h4badArgument(env, "GRreadimage: edge input array < order 2");
+ } /* end else if */
+ else {
+ arr = (jbyte *)ENVPTR->GetPrimitiveArrayCritical(ENVPAR data, &bb);
+
+ strt = ENVPTR->GetIntArrayElements(ENVPAR start, &bb);
+ if (strt == NULL) {
+ h4JNIFatalError(env, "GRreadimage: start not pinned");
+ } /* end if */
+ else {
+ edg = ENVPTR->GetIntArrayElements(ENVPAR edge, &bb);
+ if (edg == NULL) {
+ h4JNIFatalError(env, "GRreadimage: edge not pinned");
+ } /* end if */
+ else {
+ if (stride == NULL) {
+ strd = NULL;
+ }
+ else {
+ strd = ENVPTR->GetIntArrayElements(ENVPAR stride, &bb);
+ }
+ rval = GRreadimage((int32)ri_id, (int32 *)strt, (int32 *)strd,
+ (int32 *)edg, (VOIDP)arr);
+
+ if (stride != NULL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR stride, strd, JNI_ABORT);
+ }
+ ENVPTR->ReleaseIntArrayElements(ENVPAR edge, edg, JNI_ABORT);
+
+ if (rval == FAIL) {
+ cbb = JNI_ABORT;
+ CALL_ERROR_CHECK();
+ }
+ } /* end else */
+ ENVPTR->ReleaseIntArrayElements(ENVPAR start, strt, JNI_ABORT);
+ }
+ ENVPTR->ReleasePrimitiveArrayCritical(ENVPAR data, arr, cbb);
+ } /* end else */
+ return JNI_TRUE;
+}
+
+JNIEXPORT jshort JNICALL
+Java_hdf_hdflib_HDFLibrary_GRidtoref
+(JNIEnv *env, jclass cls, jlong gr_id)
+{
+ uint16 rval;
+
+ rval = GRidtoref((int32) gr_id);
+ if (rval <= 0)
+ CALL_ERROR_CHECK();
+
+ return (jshort)rval;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_GRreftoindex
+(JNIEnv *env, jclass cls, jlong gr_id, jshort ref)
+{
+ int32 rval;
+
+ rval = GRreftoindex((int32) gr_id, (uint16)ref);
+ if (rval < 0)
+ CALL_ERROR_CHECK();
+
+ return (jint)rval;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_GRreqlutil
+(JNIEnv *env, jclass cls, jlong gr_id, jint interlace_mode)
+{
+ intn rval;
+
+ rval = GRreqlutil((int32) gr_id, (intn)interlace_mode);
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_GRreqimageil
+(JNIEnv *env, jclass cls, jlong gr_id, jint interlace_mode)
+{
+ intn rval;
+
+ rval = GRreqimageil((int32) gr_id, (intn)interlace_mode);
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jlong JNICALL
+Java_hdf_hdflib_HDFLibrary_GRgetlutid
+(JNIEnv *env, jclass cls, jlong gr_id, jint index)
+{
+ int32 rval;
+
+ rval = GRgetlutid((int32) gr_id, (int32)index);
+ if (rval < 0)
+ CALL_ERROR_CHECK();
+
+ return (jlong)rval;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_GRgetnluts
+(JNIEnv *env, jclass cls, jlong gr_id)
+{
+ intn rval;
+
+ rval = GRgetnluts((int32) gr_id);
+ if (rval < 0)
+ CALL_ERROR_CHECK();
+
+ return (jint)rval;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_GRgetlutinfo
+(JNIEnv *env, jclass cls, jlong ri_id, jintArray argv)
+{
+ intn rval;
+ jint * theArgs;
+ jboolean isCopy; /* dummy */
+
+ if (argv == NULL) {
+ h4nullArgument(env, "GRgetlutinfo: argv is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR argv) < 4) {
+ h4badArgument(env, "GRgetlutinfo: argv input array < order 4");
+ } /* end else if */
+ else {
+ theArgs = ENVPTR->GetIntArrayElements(ENVPAR argv, &isCopy);
+ if (theArgs == NULL) {
+ h4JNIFatalError(env, "GRgetlutinfo: argv not pinned");
+ } /* end if */
+ else {
+ rval = GRgetlutinfo((int32)ri_id, (int32 *)&(theArgs[0]),
+ (int32 *)&(theArgs[1]), (int32 *)&(theArgs[2]),
+ (int32 *)&(theArgs[3]));
+
+
+ if (rval == FAIL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR argv, theArgs, JNI_ABORT);
+ CALL_ERROR_CHECK();
+ }
+ else {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR argv, theArgs, 0);
+ }
+ } /* end else */
+ } /* end else */
+ return JNI_TRUE;
+}
+
+
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_GRreadlut
+(JNIEnv *env, jclass cls, jlong pal_id, jbyteArray pal_data)
+{
+ intn rval;
+ jbyte *arr;
+ jboolean bb;
+
+ if (pal_data == NULL) {
+ h4nullArgument(env, "GRreadlut: pal_data is NULL");
+ } /* end if */
+ else {
+ arr = ENVPTR->GetByteArrayElements(ENVPAR pal_data, &bb);
+ if (arr == NULL) {
+ h4JNIFatalError(env, "GRreadlut: pal_data not pinned");
+ } /* end if */
+ else {
+ rval = GRreadlut((int32) pal_id, (VOIDP)arr);
+
+ if (rval == FAIL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR pal_data, arr, JNI_ABORT);
+ CALL_ERROR_CHECK();
+ }
+ else {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR pal_data, arr, 0);
+ }
+ } /* end else */
+ } /* end else */
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_GRattrinfo
+(JNIEnv *env, jclass cls, jlong gr_id, jint attr_index, jobjectArray name, jintArray argv)
+{
+ int32 rval;
+ char *str;
+ jclass jc;
+ jstring rstring;
+ jint *theArgs;
+ jboolean bb;
+ jobject o;
+
+
+ /* check for out of memory error ... */
+ str = (char *)HDmalloc(MAX_GR_NAME+1);
+ if (str == NULL) {
+ h4outOfMemory(env, "GRattrinfo");
+ }
+ else {
+ if (name == NULL) {
+ h4nullArgument(env, "GRattrinfo: name is NULL");
+ } /* end if */
+ else if (argv == NULL) {
+ h4nullArgument(env, "GRattrinfo: argv is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR argv) < 2) {
+ h4badArgument(env, "GRattrinfo: argv input array < order 2");
+ } /* end else if */
+ else {
+ theArgs = ENVPTR->GetIntArrayElements(ENVPAR argv, &bb);
+ if (theArgs == NULL) {
+ h4JNIFatalError(env, "GRattrinfo: argv not pinned");
+ } /* end if */
+ else {
+ rval = GRattrinfo((int32)gr_id, (int32)attr_index,
+ (char *)str, (int32 *)&(theArgs[0]), (int32 *)&(theArgs[1]));
+
+ if (rval == FAIL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR argv, theArgs, JNI_ABORT);
+ CALL_ERROR_CHECK();
+ }
+ else {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR argv, theArgs, 0);
+ if (str != NULL) {
+ str[MAX_GR_NAME] = '\0';
+ rstring = ENVPTR->NewStringUTF(ENVPAR str);
+ o = ENVPTR->GetObjectArrayElement(ENVPAR name, 0);
+ if (o == NULL) {
+ CALL_ERROR_CHECK();
+ }
+ jc = ENVPTR->FindClass(ENVPAR "java/lang/String");
+ if (jc == NULL) {
+ CALL_ERROR_CHECK();
+ }
+ bb = ENVPTR->IsInstanceOf(ENVPAR o, jc);
+ if (bb == JNI_FALSE) {
+ CALL_ERROR_CHECK();
+ }
+ ENVPTR->SetObjectArrayElement(ENVPAR name, 0, (jobject)rstring);
+ ENVPTR->DeleteLocalRef(ENVPAR o);
+ }
+ }
+ } /* end else */
+ } /* end else */
+ HDfree(str);
+ }
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_GRgetattr
+(JNIEnv *env, jclass cls, jlong gr_id, jint attr_index, jbyteArray values)
+{
+ intn rval;
+ jbyte *arr;
+ jboolean bb;
+
+ if (values == NULL) {
+ h4nullArgument(env, "GRgetattr: values is NULL");
+ } /* end if */
+ else {
+ arr = ENVPTR->GetByteArrayElements(ENVPAR values, &bb);
+ if (arr == NULL) {
+ h4JNIFatalError(env, "GRgetattr: values not pinned");
+ } /* end if */
+ else {
+ rval = GRgetattr((int32)gr_id, (int32)attr_index, (VOIDP)arr);
+ if (rval == FAIL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR values, arr, JNI_ABORT);
+ CALL_ERROR_CHECK();
+ }
+ else {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR values, arr, 0);
+ }
+ } /* end else */
+ } /* end else */
+ return JNI_TRUE;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_GRfindattr
+(JNIEnv *env, jclass cls, jlong gr_id, jstring attr_name)
+{
+    int32 rval = FAIL;
+ const char *str;
+
+ PIN_JAVA_STRING(attr_name, str);
+ if (str != NULL) {
+ rval = GRfindattr((int32)gr_id, str);
+
+ UNPIN_JAVA_STRING(attr_name, str);
+
+ if (rval < 0)
+ CALL_ERROR_CHECK();
+ }
+
+ return (jint)rval;
+}
+
+JNIEXPORT jlong JNICALL
+Java_hdf_hdflib_HDFLibrary_GRcreate
+(JNIEnv *env, jclass cls, jlong gr_id, jstring name, jint ncomp, jlong data_type, jint interlace_mode, jintArray dim_sizes)
+{
+    int32 rval = FAIL;
+ jint *dims;
+ const char *str;
+ jboolean bb;
+
+ if (dim_sizes == NULL) {
+ h4nullArgument(env, "GRcreate: dim_sizes is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR dim_sizes) < 2) {
+ h4badArgument(env, "GRcreate: dim_sizes input array < order 2");
+ } /* end else if */
+ else {
+ PIN_JAVA_STRING(name, str);
+ if (str != NULL) {
+ dims = ENVPTR->GetIntArrayElements(ENVPAR dim_sizes, &bb);
+ if (dims == NULL) {
+ h4JNIFatalError(env, "GRcreate: dim_sizes not pinned");
+ } /* end if */
+ else {
+ rval = GRcreate( (int32)gr_id, str, (int32)ncomp,
+ (int32)data_type, (int32)interlace_mode, (int32 *)dims);
+
+ ENVPTR->ReleaseIntArrayElements(ENVPAR dim_sizes, dims, JNI_ABORT);
+ if (rval < 0)
+ CALL_ERROR_CHECK();
+ } /* end else */
+ UNPIN_JAVA_STRING(name, str);
+ }
+ } /* end else */
+ return (jlong)rval;
+}
+
+
+JNIEXPORT jshort JNICALL
+Java_hdf_hdflib_HDFLibrary_GRluttoref
+(JNIEnv *env, jclass cls, jlong pal_id)
+{
+ uint16 rval;
+
+ rval = GRluttoref((int32) pal_id);
+ if (rval <= 0)
+ CALL_ERROR_CHECK();
+
+ return (jshort)rval;
+}
+
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_GRsetattr__JLjava_lang_String_2JILjava_lang_String_2
+(JNIEnv *env, jclass cls, jlong gr_id, jstring attr_name, jlong data_type, jint count, jstring values)
+{
+ intn rval;
+ const char *str;
+ const char *val;
+
+ PIN_JAVA_STRING_TWO(attr_name, str, values, val);
+ if (str != NULL && val != NULL) {
+ rval = GRsetattr((int32)gr_id, str, (int32)data_type, (int32)count, (VOIDP)val);
+
+ UNPIN_JAVA_STRING_TWO(attr_name, str, values, val);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+ }
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_GRsetattr__JLjava_lang_String_2JI_3B
+(JNIEnv *env, jclass cls, jlong gr_id, jstring attr_name, jlong data_type, jint count, jbyteArray values)
+{
+ intn rval;
+ jbyte *arr;
+ const char *str;
+ jboolean bb;
+
+ if (values == NULL) {
+ h4nullArgument(env, "GRsetattr: values is NULL");
+ } /* end if */
+ else {
+ PIN_JAVA_STRING(attr_name, str);
+ if (str != NULL) {
+ arr = ENVPTR->GetByteArrayElements(ENVPAR values, &bb);
+ if (arr == NULL) {
+ h4JNIFatalError(env, "GRsetattr: values not pinned");
+ } /* end if */
+ else {
+ rval = GRsetattr((int32)gr_id, str, (int32)data_type, (int32)count, (VOIDP)arr);
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR values, arr, JNI_ABORT);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+ } /* end else */
+ UNPIN_JAVA_STRING(attr_name, str);
+ }
+ } /* end else */
+ return JNI_TRUE;
+}
+
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_GRsetcompress
+(JNIEnv *env, jclass cls, jlong ri_id, jint comp_type, jobject c_info)
+{
+ intn rval;
+ comp_info cinf;
+
+ if (c_info == NULL) {
+ h4nullArgument(env, "GRsetcompress: c_info is NULL");
+ } /* end if */
+ else {
+ if (getNewCompInfo(env, c_info, &cinf)) {
+ rval = GRsetcompress((int32)ri_id, (comp_coder_t)comp_type, (comp_info *)&cinf);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+ } /* end if */
+ else {
+ h4JNIFatalError(env, "GRsetcompress: c_info not initialized");
+ } /* end else */
+ } /* end else */
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_GRgetcompress
+(JNIEnv *env, jclass cls, jlong ri_id, jobject c_info)
+{
+ intn rval;
+ comp_coder_t coder;
+ comp_info cinf;
+
+ if (c_info == NULL) {
+ h4nullArgument(env, "GRgetcompress: c_info is NULL");
+ } /* end if */
+ else {
+ rval = GRgetcompress((int32)ri_id, (comp_coder_t *)&coder, (comp_info *)&cinf);
+
+ if (setNewCompInfo(env, c_info, coder, &cinf)) {
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+ } /* end if */
+ else {
+ h4JNIFatalError(env, "GRgetcompress: c_info not created");
+ } /* end else */
+ } /* end else */
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_GRgetcompinfo
+(JNIEnv *env, jclass cls, jlong ri_id, jobject c_info)
+{
+ intn rval;
+ comp_coder_t coder;
+ comp_info cinf;
+
+ if (c_info == NULL) {
+ h4nullArgument(env, "GRgetcompinfo: c_info is NULL");
+ } /* end if */
+ else {
+ rval = GRgetcompinfo((int32)ri_id, (comp_coder_t *)&coder, (comp_info *)&cinf);
+
+ if (setNewCompInfo(env, c_info, coder, &cinf)) {
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+ } /* end if */
+ else {
+ h4JNIFatalError(env, "GRgetcompinfo: c_info not created");
+ } /* end else */
+ } /* end else */
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_GRsetchunk
+(JNIEnv *env, jclass cls, jlong sdsid, jobject chunk_def, jint flags)
+{
+ intn rval;
+ HDF_CHUNK_DEF c_def;
+
+ if (chunk_def == NULL) {
+ h4nullArgument(env, "GRsetchunk: chunk_def is NULL");
+ } /* end if */
+ else {
+ if (getChunkInfo(env, chunk_def, &c_def)) {
+ rval = SDsetchunk ((int32)sdsid, c_def, (int32)flags);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+ } /* end if */
+ else {
+ h4JNIFatalError(env, "GRsetchunk: chunk_def not initialized");
+ } /* end else */
+ } /* end else */
+ return JNI_TRUE;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_GRsetchunkcache
+(JNIEnv *env, jclass cls, jlong sdsid, jint maxcache, jint flags)
+{
+ intn rval;
+
+ rval = SDsetchunkcache((int32)sdsid, (int32)maxcache, (int32)flags);
+ if (rval < 0)
+ CALL_ERROR_CHECK();
+
+ return (jint)rval;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_GRsetexternalfile
+(JNIEnv *env, jclass cls, jlong ri_id, jstring filename, jint offset)
+{
+ intn rval;
+ const char *str;
+
+ PIN_JAVA_STRING(filename, str);
+ if (str != NULL) {
+ rval = GRsetexternalfile((int32)ri_id, str, (int32)offset);
+
+ UNPIN_JAVA_STRING(filename, str);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+ }
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_GRwriteimage(JNIEnv *env, jclass cls, jlong ri_id,
+ jintArray start, jintArray stride, jintArray edge, jbyteArray data)
+{
+ intn rval;
+ jbyte *arr;
+ jint *strt;
+ jint *strd;
+ jint *edg;
+ jboolean bb;
+ jboolean cbb = 0;
+
+ if (data == NULL) {
+ h4nullArgument(env, "GRwriteimage: data is NULL");
+ } /* end if */
+ else if (start == NULL) {
+ h4nullArgument(env, "GRreadimage: start is NULL");
+ } /* end else if */
+ else if (ENVPTR->GetArrayLength(ENVPAR start) < 2) {
+ h4badArgument(env, "GRreadimage: start input array < order 2");
+ } /* end else if */
+ else if (edge == NULL) {
+ h4nullArgument(env, "GRreadimage: edge is NULL");
+ } /* end else if */
+ else if (ENVPTR->GetArrayLength(ENVPAR edge) < 2) {
+ h4badArgument(env, "GRreadimage: edge input array < order 2");
+ } /* end else if */
+ else {
+ arr = ENVPTR->GetByteArrayElements(ENVPAR data, &bb);
+ if (arr == NULL) {
+ h4JNIFatalError(env, "GRwriteimage: data not pinned");
+ } /* end if */
+ else {
+ strt = ENVPTR->GetIntArrayElements(ENVPAR start, &bb);
+ if (strt == NULL) {
+ h4JNIFatalError(env, "GRreadimage: start not pinned");
+ } /* end if */
+ else {
+ edg = ENVPTR->GetIntArrayElements(ENVPAR edge, &bb);
+ if (edg == NULL) {
+ h4JNIFatalError(env, "GRwriteimage: edge not pinned");
+ } /* end if */
+ else {
+ if (stride == NULL) {
+ strd = NULL;
+ }
+ else {
+ strd = ENVPTR->GetIntArrayElements(ENVPAR stride,&bb);
+ }
+ rval = GRwriteimage((int32)ri_id, (int32 *)strt, (int32 *)strd,
+ (int32 *)edg, (VOIDP)arr);
+
+ if (stride != NULL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR stride, strd, JNI_ABORT);
+ }
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ ENVPTR->ReleaseIntArrayElements(ENVPAR edge, edg, JNI_ABORT);
+ } /* end else */
+ ENVPTR->ReleaseIntArrayElements(ENVPAR start, strt, JNI_ABORT);
+ } /* end else */
+ ENVPTR->ReleaseByteArrayElements(ENVPAR data, arr, JNI_ABORT);
+ } /* end else */
+ } /* end else */
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_GRwritelut
+(JNIEnv *env, jclass cls, jlong pal_id, jint ncomp, jint data_type, jint interlace, jint num_entries, jbyteArray pal_data)
+{
+ intn rval;
+ jbyte *arr;
+ jboolean bb;
+
+ if (pal_data == NULL) {
+ h4nullArgument(env, "GRwritelut: pal_data is NULL");
+ } /* end if */
+ else {
+ arr = ENVPTR->GetByteArrayElements(ENVPAR pal_data, &bb);
+ if (arr == NULL) {
+ h4JNIFatalError(env, "GRwritelut: pal_data not pinned");
+ } /* end if */
+ else {
+ rval = GRwritelut((int32)pal_id, (int32)ncomp, (int32)data_type,
+ (int32)interlace, (int32)num_entries, (VOIDP)arr);
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR pal_data, arr, JNI_ABORT);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+ } /* end else */
+ } /* end else */
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_GRreadchunk
+(JNIEnv *env, jclass cls, jlong grid, jintArray origin, jbyteArray dat)
+{
+ int32 rval;
+ jbyte *arr;
+ jint *org;
+ jboolean bb;
+ jboolean cbb = 0;
+
+ if (dat == NULL) {
+ h4nullArgument(env, "GRreadchunk: dat is NULL");
+ } /* end if */
+ else if (origin == NULL) {
+ h4nullArgument(env, "GRreadchunk: origin is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR origin) < 2) {
+ h4badArgument(env, "GRreadchunk: origin input array < order 2");
+ } /* end else if */
+ else {
+ arr = ENVPTR->GetByteArrayElements(ENVPAR dat, &bb);
+ if (arr == NULL) {
+ h4JNIFatalError(env, "GRreadchunk: dat not pinned");
+ } /* end if */
+ else {
+ org = ENVPTR->GetIntArrayElements(ENVPAR origin, &bb);
+ if (org == NULL) {
+ h4JNIFatalError(env, "GRreadchunk: origin not pinned");
+ } /* end if */
+ else {
+ rval = GRreadchunk((int32)grid, (int32 *)org, arr);
+
+ ENVPTR->ReleaseIntArrayElements(ENVPAR origin, org, JNI_ABORT);
+ if (rval == FAIL) {
+ cbb = JNI_ABORT;
+ CALL_ERROR_CHECK();
+ }
+ } /* end else */
+ ENVPTR->ReleaseByteArrayElements(ENVPAR dat, arr, cbb);
+ } /* end else */
+ } /* end else */
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_GRwritechunk
+(JNIEnv *env, jclass cls, jlong grid, jintArray origin, jbyteArray dat)
+{
+ int32 rval;
+ jbyte * arr;
+ jint * org;
+ jboolean bb;
+
+ if (dat == NULL) {
+ h4nullArgument(env, "GRwritechunk: dat is NULL");
+ } /* end if */
+ else if (origin == NULL) {
+ h4nullArgument(env, "GRwritechunk: origin is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR origin) < 2) {
+ h4badArgument(env, "GRwritechunk: origin input array < order 2");
+ } /* end else if */
+ else {
+ arr = ENVPTR->GetByteArrayElements(ENVPAR dat, &bb);
+ if (arr == NULL) {
+ h4JNIFatalError(env, "GRwritechunk: dat not pinned");
+ } /* end if */
+ else {
+ org = ENVPTR->GetIntArrayElements(ENVPAR origin, &bb);
+ if (org == NULL) {
+ h4JNIFatalError(env, "GRwritechunk: origin not pinned");
+ } /* end if */
+ else {
+ rval = GRwritechunk((int32)grid, (int32 *)org, (char *)arr);
+
+ ENVPTR->ReleaseIntArrayElements(ENVPAR origin, org, JNI_ABORT);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+ } /* end else */
+ ENVPTR->ReleaseByteArrayElements(ENVPAR dat, arr, JNI_ABORT);
+ } /* end else */
+ } /* end else */
+ return JNI_TRUE;
+}
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/java/src/jni/hdfheImp.c b/java/src/jni/hdfheImp.c
new file mode 100644
index 0000000..dd2aaa4
--- /dev/null
+++ b/java/src/jni/hdfheImp.c
@@ -0,0 +1,52 @@
+
+/****************************************************************************
+ ****************************************************************************/
+/*
+ * This code is the C-interface called by Java programs to access the
+ * HDF 4 library.
+ *
+ * Each routine wraps a single HDF entry point, generally with the
+ * analogous arguments and return codes.
+ *
+ */
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+#include "hdf.h"
+#include "hfile.h"
+#include "jni.h"
+#include "h4jni.h"
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_HEvalue
+(JNIEnv *env, jclass clss, jint level)
+{
+ return HEvalue((int32) level);
+}
+
+JNIEXPORT void JNICALL
+Java_hdf_hdflib_HDFLibrary_HEprint
+(JNIEnv *env, jclass clss, jobject stream, jint level)
+{
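+ /* Note: the jobject argument is cast directly to a C FILE pointer; no
+ conversion from a java.io stream is attempted here. */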
+ HEprint((FILE *) stream, (int32) level);
+}
+
+JNIEXPORT jstring JNICALL
+Java_hdf_hdflib_HDFLibrary_HEstring
+(JNIEnv *env, jclass clss, jshort error_code)
+{
+ char * str;
+ jstring rstring;
+
+ str = (char *)HEstring((hdf_err_code_t)error_code);
+
+ rstring = ENVPTR->NewStringUTF(ENVPAR str);
+
+ return rstring;
+}
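+
+/* Illustrative sketch: HEvalue() and HEstring() are typically combined to
+ * walk the HDF error stack, roughly as below (this assumes level 1 is the
+ * most recent entry and that HEvalue() returns DFE_NONE once the stack is
+ * exhausted):
+ *
+ * int level = 1;
+ * int code;
+ * while ((code = HEvalue((int32)level++)) != DFE_NONE)
+ * fprintf(stderr, "HDF error %d: %s\n", code, HEstring((hdf_err_code_t)code));
+ */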
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/java/src/jni/hdfhxImp.c b/java/src/jni/hdfhxImp.c
new file mode 100644
index 0000000..3fe3678
--- /dev/null
+++ b/java/src/jni/hdfhxImp.c
@@ -0,0 +1,88 @@
+
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+/*
+ * This code is the C-interface called by Java programs to access the
+ * HDF 4 library.
+ *
+ * Each routine wraps a single HDF entry point, generally with the
+ * analogous arguments and return codes.
+ *
+ */
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "hdf.h"
+#include "jni.h"
+#include "h4jni.h"
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_HXsetcreatedir
+(JNIEnv *env, jclass clss, jstring dir)
+{
+ intn rval;
+ char *str;
+
+ if (dir != NULL) {
+ str =(char *) ENVPTR->GetStringUTFChars(ENVPAR dir,0);
+ }
+ else {
+ str = NULL;
+ }
+
+ rval = HXsetcreatedir((char *)str);
+
+ if (str != NULL) {
+ ENVPTR->ReleaseStringUTFChars(ENVPAR dir,str);
+ }
+
+ if (rval == FAIL) {
+ return JNI_FALSE;
+ }
+ else {
+ return JNI_TRUE;
+ }
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_HXsetdir
+(JNIEnv *env, jclass clss, jstring dir)
+{
+ intn rval;
+ char *str;
+
+ if (dir != NULL) {
+ str =(char *) ENVPTR->GetStringUTFChars(ENVPAR dir,0);
+ }
+ else {
+ str = NULL;
+ }
+
+ rval = HXsetdir(str);
+
+ if (str != NULL) {
+ ENVPTR->ReleaseStringUTFChars(ENVPAR dir,str);
+ }
+
+ if (rval == FAIL) {
+ return JNI_FALSE;
+ }
+ else {
+ return JNI_TRUE;
+ }
+}
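+
+/* Illustrative sketch: the two wrappers above are normally used before
+ * opening a file that refers to external data elements. HXsetdir() tells the
+ * library where to look for existing external files, while HXsetcreatedir()
+ * names the directory in which newly created external files are placed:
+ *
+ * HXsetdir("/data/hdf");
+ * HXsetcreatedir("/data/hdf/new");
+ *
+ * The paths are, of course, just placeholders.
+ */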
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/java/src/jni/hdfnativeImp.c b/java/src/jni/hdfnativeImp.c
new file mode 100644
index 0000000..657c84c
--- /dev/null
+++ b/java/src/jni/hdfnativeImp.c
@@ -0,0 +1,1200 @@
+
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+/*
+ * This module contains the implementation of all the native methods
+ * used for number conversion. This is represented by the Java
+ * class HDFNativeData.
+ *
+ * These routines convert one dimensional arrays of bytes into
+ * one-D arrays of other types (int, float, etc) and vice versa.
+ *
+ * These routines are called from the Java parts of the Java-C
+ * interface.
+ *
+ * ***Important notes:
+ *
+ * 1. These routines are designed to be portable--they use the
+ * C compiler to do the required native data manipulation.
+ * 2. These routines copy the data at least once -- a serious
+ * but unavoidable performance hit.
+ */
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "hdf.h"
+#include "jni.h"
+#include "h4jni.h"
+
+
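+/* Illustrative sketch: every byteToXxx routine below reinterprets the pinned
+ * byte buffer in the machine's native byte order; for jint the copy step is
+ * equivalent to
+ *
+ * jint value;
+ * memcpy(&value, bp, sizeof(jint));
+ * *iap++ = value;
+ * bp += sizeof(jint);
+ *
+ * No byte swapping is performed here, and the memcpy() form additionally
+ * avoids unaligned reads on strict-alignment platforms. */
+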
+JNIEXPORT jintArray JNICALL
+Java_hdf_hdflib_HDFNativeData_byteToInt___3B
+(JNIEnv *env, jclass clss, jbyteArray bdata)
+{
+ jbyte *barr;
+ jintArray rarray;
+ int blen;
+ jint *iarray;
+ jboolean bb;
+ char *bp;
+ jint *iap;
+ int ii;
+ int len;
+
+ if (bdata == NULL) {
+ h4raiseException( env, "byteToInt: bdata is NULL?");
+ return NULL;
+ }
+ barr = ENVPTR->GetByteArrayElements(ENVPAR bdata,&bb);
+ if (barr == NULL) {
+ h4JNIFatalError( env, "byteToInt: pin failed");
+ return NULL;
+ }
+
+ blen = ENVPTR->GetArrayLength(ENVPAR bdata);
+
+ len = blen/sizeof(jint);
+ rarray = ENVPTR->NewIntArray(ENVPAR len);
+ if (rarray == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h4outOfMemory( env, "byteToInt" );
+ return NULL;
+ }
+
+ iarray = ENVPTR->GetIntArrayElements(ENVPAR rarray,&bb);
+ if (iarray == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h4JNIFatalError( env, "byteToInt: pin iarray failed");
+ return NULL;
+ }
+
+ bp = (char *)barr;
+ iap = iarray;
+ for (ii = 0; ii < len; ii++) {
+ *iap = *(jint *)bp;
+ iap++;
+ bp += sizeof(jint);
+ }
+
+ ENVPTR->ReleaseIntArrayElements(ENVPAR rarray,iarray, 0);
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+
+ return rarray;
+
+}
+
+
+JNIEXPORT jfloatArray JNICALL
+Java_hdf_hdflib_HDFNativeData_byteToFloat___3B
+(JNIEnv *env, jclass clss, jbyteArray bdata)
+{
+ jbyte *barr;
+ jfloatArray rarray;
+ int blen;
+ jfloat *farray;
+ jboolean bb;
+ char *bp;
+ jfloat *iap;
+ int ii;
+ int len;
+
+ if (bdata == NULL) {
+ h4raiseException( env, "byteToFloat: bdata is NULL?");
+ return NULL;
+ }
+ barr = ENVPTR->GetByteArrayElements(ENVPAR bdata,&bb);
+ if (barr == NULL) {
+ h4JNIFatalError( env, "byteToFloat: pin failed");
+ return NULL;
+ }
+ blen = ENVPTR->GetArrayLength(ENVPAR bdata);
+
+ len = blen/sizeof(jfloat);
+ rarray = ENVPTR->NewFloatArray(ENVPAR len);
+ if (rarray == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h4outOfMemory( env, "byteToFloat" );
+ return NULL;
+ }
+ farray = ENVPTR->GetFloatArrayElements(ENVPAR rarray,&bb);
+ if (farray == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h4JNIFatalError( env, "byteToFloat: pin farray failed");
+ return NULL;
+ }
+
+ bp = (char *)barr;
+ iap = farray;
+ for (ii = 0; ii < len; ii++) {
+ *iap = *(jfloat *)bp;
+ iap++;
+ bp += sizeof(jfloat);
+ }
+
+ ENVPTR->ReleaseFloatArrayElements(ENVPAR rarray,farray, 0);
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+
+ return rarray;
+
+}
+
+JNIEXPORT jshortArray JNICALL
+Java_hdf_hdflib_HDFNativeData_byteToShort___3B
+(JNIEnv *env, jclass clss, jbyteArray bdata)
+{
+ jbyte *barr;
+ jshortArray rarray;
+ int blen;
+ jshort *sarray;
+ jboolean bb;
+ char *bp;
+ jshort *iap;
+ int ii;
+ int len;
+
+ if (bdata == NULL) {
+ h4raiseException( env, "byteToShort: bdata is NULL?");
+ return NULL;
+ }
+ barr = ENVPTR->GetByteArrayElements(ENVPAR bdata,&bb);
+ if (barr == NULL) {
+ h4JNIFatalError( env, "byteToShort: pin failed");
+ return NULL;
+ }
+
+ blen = ENVPTR->GetArrayLength(ENVPAR bdata);
+
+ len = blen/sizeof(jshort);
+ rarray = ENVPTR->NewShortArray(ENVPAR len);
+ if (rarray == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h4outOfMemory( env, "byteToShort" );
+ return NULL;
+ }
+
+ sarray = ENVPTR->GetShortArrayElements(ENVPAR rarray,&bb);
+ if (sarray == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h4JNIFatalError( env, "byteToShort: pin sarray failed");
+ return NULL;
+ }
+
+ bp = (char *)barr;
+ iap = sarray;
+ for (ii = 0; ii < len; ii++) {
+ *iap = *(jshort *)bp;
+ iap++;
+ bp += sizeof(jshort);
+ }
+
+ ENVPTR->ReleaseShortArrayElements(ENVPAR rarray,sarray, 0);
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+
+ return rarray;
+
+}
+
+
+JNIEXPORT jlongArray JNICALL
+Java_hdf_hdflib_HDFNativeData_byteToLong___3B
+(JNIEnv *env, jclass clss, jbyteArray bdata)
+{
+ jbyte *barr;
+ jlongArray rarray;
+ int blen;
+ jlong *larray;
+ jboolean bb;
+ char *bp;
+ jlong *iap;
+ int ii;
+ int len;
+
+ if (bdata == NULL) {
+ h4raiseException( env, "byteToLong: bdata is NULL?");
+ return NULL;
+ }
+ barr = ENVPTR->GetByteArrayElements(ENVPAR bdata,&bb);
+ if (barr == NULL) {
+ h4JNIFatalError( env, "byteToLong: pin failed");
+ return NULL;
+ }
+ blen = ENVPTR->GetArrayLength(ENVPAR bdata);
+
+ len = blen/sizeof(jlong);
+ rarray = ENVPTR->NewLongArray(ENVPAR len);
+ if (rarray == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h4outOfMemory( env, "byteToLong" );
+ return NULL;
+ }
+
+ larray = ENVPTR->GetLongArrayElements(ENVPAR rarray,&bb);
+ if (larray == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h4JNIFatalError( env, "byteToLong: pin larray failed");
+ return NULL;
+ }
+
+ bp = (char *)barr;
+ iap = larray;
+ for (ii = 0; ii < len; ii++) {
+ *iap = *(jlong *)bp;
+ iap++;
+ bp += sizeof(jlong);
+ }
+ ENVPTR->ReleaseLongArrayElements(ENVPAR rarray,larray, 0);
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+
+ return rarray;
+
+}
+
+
+JNIEXPORT jdoubleArray JNICALL
+Java_hdf_hdflib_HDFNativeData_byteToDouble___3B
+(JNIEnv *env, jclass clss, jbyteArray bdata)
+{
+ jbyte *barr;
+ jdoubleArray rarray;
+ int blen;
+ jdouble *darray;
+ jboolean bb;
+ char *bp;
+ jdouble *iap;
+ int ii;
+ int len;
+
+ if (bdata == NULL) {
+ h4raiseException( env, "byteToDouble: bdata is NULL?");
+ return NULL;
+ }
+ barr = ENVPTR->GetByteArrayElements(ENVPAR bdata,&bb);
+ if (barr == NULL) {
+ h4JNIFatalError( env, "byteToDouble: pin failed");
+ return NULL;
+ }
+ blen = ENVPTR->GetArrayLength(ENVPAR bdata);
+
+ len = blen/sizeof(jdouble);
+ rarray = ENVPTR->NewDoubleArray(ENVPAR len);
+ if (rarray == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h4outOfMemory( env, "byteToDouble" );
+ return NULL;
+ }
+
+ darray = ENVPTR->GetDoubleArrayElements(ENVPAR rarray,&bb);
+ if (darray == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h4JNIFatalError( env, "byteToDouble: pin darray failed");
+ return NULL;
+ }
+
+ bp = (char *)barr;
+ iap = darray;
+ for (ii = 0; ii < len; ii++) {
+ *iap = *(jdouble *)bp;
+ iap++;
+ bp += sizeof(jdouble);
+ }
+
+ ENVPTR->ReleaseDoubleArrayElements(ENVPAR rarray,darray,0);
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+
+ return rarray;
+}
+
+
+JNIEXPORT jintArray JNICALL
+Java_hdf_hdflib_HDFNativeData_byteToInt__II_3B
+(JNIEnv *env, jclass clss, jint start, jint len, jbyteArray bdata)
+{
+ char *bp;
+ jbyte *barr;
+ jintArray rarray;
+ int blen;
+ jint *iarray;
+ jint *iap;
+ int ii;
+ jboolean bb;
+
+ if (bdata == NULL) {
+ h4raiseException( env, "byteToInt: bdata is NULL?");
+ return NULL;
+ }
+ barr = ENVPTR->GetByteArrayElements(ENVPAR bdata,&bb);
+ if (barr == NULL) {
+ h4JNIFatalError( env, "byteToInt: pin failed");
+ return NULL;
+ }
+
+ blen = ENVPTR->GetArrayLength(ENVPAR bdata);
+ if ((start < 0) || ((int)(start + (len*sizeof(jint))) > blen)) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h4JNIFatalError( env, "byteToInt: start or len is out of bounds");
+ return NULL;
+ }
+
+ bp = (char *)barr + start;
+
+ rarray = ENVPTR->NewIntArray(ENVPAR len);
+ if (rarray == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h4outOfMemory( env, "byteToInt" );
+ return NULL;
+ }
+
+ iarray = ENVPTR->GetIntArrayElements(ENVPAR rarray,&bb);
+ if (iarray == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h4JNIFatalError( env, "byteToInt: pin iarray failed");
+ return NULL;
+ }
+
+ iap = iarray;
+ for (ii = 0; ii < len; ii++) {
+ *iap = *(jint *)bp;
+ iap++;
+ bp += sizeof(jint);
+ }
+
+ ENVPTR->ReleaseIntArrayElements(ENVPAR rarray,iarray, 0);
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+
+ return rarray;
+
+}
+
+JNIEXPORT jshortArray JNICALL
+Java_hdf_hdflib_HDFNativeData_byteToShort__II_3B
+(JNIEnv *env, jclass clss, jint start, jint len, jbyteArray bdata)
+{
+ char *bp;
+ jbyte *barr;
+ jshortArray rarray;
+ int blen;
+ jshort *iarray;
+ jshort *iap;
+ int ii;
+ jboolean bb;
+
+ if (bdata == NULL) {
+ h4raiseException( env, "byteToShort: bdata is NULL?");
+ return NULL;
+ }
+ barr = ENVPTR->GetByteArrayElements(ENVPAR bdata,&bb);
+ if (barr == NULL) {
+ h4JNIFatalError( env, "byteToShort: getByte failed?");
+ return NULL;
+ }
+
+ blen = ENVPTR->GetArrayLength(ENVPAR bdata);
+ if ((start < 0) || ((int)(start + (len*(sizeof(jshort)))) > blen)) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h4raiseException( env, "byteToShort: start or len is out of bounds");
+ return NULL;
+ }
+
+ bp = (char *)barr + start;
+
+ rarray = ENVPTR->NewShortArray(ENVPAR len);
+ if (rarray == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h4outOfMemory( env, "byteToShort" );
+ return NULL;
+ }
+
+ iarray = ENVPTR->GetShortArrayElements(ENVPAR rarray,&bb);
+ if (iarray == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h4JNIFatalError( env, "byteToShort: getShort failed?");
+ return NULL;
+ }
+
+ iap = iarray;
+ for (ii = 0; ii < len; ii++) {
+ *iap = *(jshort *)bp;
+ iap++;
+ bp += sizeof(jshort);
+ }
+
+ ENVPTR->ReleaseShortArrayElements(ENVPAR rarray,iarray, 0);
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+
+ return rarray;
+
+}
+
+JNIEXPORT jfloatArray JNICALL
+Java_hdf_hdflib_HDFNativeData_byteToFloat__II_3B
+(JNIEnv *env, jclass clss, jint start, jint len, jbyteArray bdata)
+{
+ char *bp;
+ jbyte *barr;
+ jfloatArray rarray;
+ int blen;
+ jfloat *iarray;
+ jfloat *iap;
+ int ii;
+ jboolean bb;
+
+ if (bdata == NULL) {
+ h4raiseException( env, "byteToFloat: bdata is NULL?");
+ return NULL;
+ }
+ barr = ENVPTR->GetByteArrayElements(ENVPAR bdata,&bb);
+ if (barr == NULL) {
+ h4JNIFatalError( env, "byteToFloat: getByte failed?");
+ return NULL;
+ }
+
+ blen = ENVPTR->GetArrayLength(ENVPAR bdata);
+ if ((start < 0) || ((int)(start + (len*(sizeof(jfloat)))) > blen)) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h4raiseException( env, "byteToFloat: start or len is out of bounds");
+ return NULL;
+ }
+
+ bp = (char *)barr + start;
+
+ rarray = ENVPTR->NewFloatArray(ENVPAR len);
+ if (rarray == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h4outOfMemory( env, "byteToFloat" );
+ return NULL;
+ }
+
+ iarray = ENVPTR->GetFloatArrayElements(ENVPAR rarray,&bb);
+ if (iarray == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h4JNIFatalError( env, "byteToFloat: getFloat failed?");
+ return NULL;
+ }
+
+ iap = iarray;
+ for (ii = 0; ii < len; ii++) {
+ *iap = *(jfloat *)bp;
+ iap++;
+ bp += sizeof(jfloat);
+ }
+
+ ENVPTR->ReleaseFloatArrayElements(ENVPAR rarray,iarray, 0);
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+
+ return rarray;
+
+}
+
+JNIEXPORT jlongArray JNICALL
+Java_hdf_hdflib_HDFNativeData_byteToLong__II_3B
+(JNIEnv *env, jclass clss, jint start, jint len, jbyteArray bdata)
+{
+ char *bp;
+ jbyte *barr;
+ jlongArray rarray;
+ int blen;
+ jlong *iarray;
+ jlong *iap;
+ int ii;
+ jboolean bb;
+
+ if (bdata == NULL) {
+ h4raiseException( env, "byteToLong: bdata is NULL?");
+ return NULL;
+ }
+ barr = ENVPTR->GetByteArrayElements(ENVPAR bdata,&bb);
+ if (barr == NULL) {
+ h4JNIFatalError( env, "byteToLong: getByte failed?");
+ return NULL;
+ }
+
+ blen = ENVPTR->GetArrayLength(ENVPAR bdata);
+ if ((start < 0) || ((int)(start + (len*(sizeof(jlong)))) > blen)) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h4raiseException( env, "byteToLong: start or len is out of bounds");
+ return NULL;
+ }
+
+ bp = (char *)barr + start;
+
+ rarray = ENVPTR->NewLongArray(ENVPAR len);
+ if (rarray == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h4outOfMemory( env, "byteToLong" );
+ return NULL;
+ }
+
+ iarray = ENVPTR->GetLongArrayElements(ENVPAR rarray,&bb);
+ if (iarray == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h4JNIFatalError( env, "byteToLong: getLong failed?");
+ return NULL;
+ }
+
+ iap = iarray;
+ for (ii = 0; ii < len; ii++) {
+
+ *iap = *(jlong *)bp;
+ iap++;
+ bp += sizeof(jlong);
+ }
+
+ ENVPTR->ReleaseLongArrayElements(ENVPAR rarray,iarray, 0);
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+
+ return rarray;
+
+}
+
+JNIEXPORT jdoubleArray JNICALL
+Java_hdf_hdflib_HDFNativeData_byteToDouble__II_3B
+(JNIEnv *env, jclass clss, jint start, jint len, jbyteArray bdata)
+{
+ char *bp;
+ jbyte *barr;
+ jdoubleArray rarray;
+ int blen;
+ jdouble *iarray;
+ jdouble *iap;
+ int ii;
+ jboolean bb;
+
+ if (bdata == NULL) {
+ h4raiseException( env, "byteToDouble: bdata is NULL?");
+ return NULL;
+ }
+ barr = ENVPTR->GetByteArrayElements(ENVPAR bdata,&bb);
+ if (barr == NULL) {
+ h4JNIFatalError( env, "byteToDouble: getByte failed?");
+ return NULL;
+ }
+
+ blen = ENVPTR->GetArrayLength(ENVPAR bdata);
+ if ((start < 0) || ((int)(start + (len*(sizeof(jdouble)))) > blen)) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h4raiseException( env, "byteToDouble: start or len is out of bounds");
+ return NULL;
+ }
+
+ bp = (char *)barr + start;
+
+ rarray = ENVPTR->NewDoubleArray(ENVPAR len);
+ if (rarray == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h4outOfMemory( env, "byteToDouble" );
+ return NULL;
+ }
+
+ iarray = ENVPTR->GetDoubleArrayElements(ENVPAR rarray,&bb);
+ if (iarray == NULL) {
+ h4JNIFatalError( env, "byteToDouble: getDouble failed?");
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ return NULL;
+ }
+
+ iap = iarray;
+ for (ii = 0; ii < len; ii++) {
+ *iap = *(jdouble *)bp;
+ iap++;
+ bp += sizeof(jdouble);
+ }
+
+ ENVPTR->ReleaseDoubleArrayElements(ENVPAR rarray,iarray, 0);
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+
+ return rarray;
+}
+
+JNIEXPORT jbyteArray JNICALL
+Java_hdf_hdflib_HDFNativeData_intToByte__II_3I
+(JNIEnv *env, jclass clss, jint start, jint len, jintArray idata)
+{
+ jint *ip;
+ jint *iarr;
+ int ilen;
+ jbyteArray rarray;
+ int blen;
+ jbyte *barray;
+ jbyte *bap;
+ jboolean bb;
+ int ii;
+ int ij;
+ union things {
+ int ival;
+ char bytes[4];
+ } u;
+
+ if (idata == NULL) {
+ h4raiseException( env, "intToByte: idata is NULL?");
+ return NULL;
+ }
+ iarr = ENVPTR->GetIntArrayElements(ENVPAR idata,&bb);
+ if (iarr == NULL) {
+ h4JNIFatalError( env, "intToByte: getInt failed?");
+ return NULL;
+ }
+
+ ilen = ENVPTR->GetArrayLength(ENVPAR idata);
+ if ((start < 0) || (((start + len)) > ilen)) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+ h4raiseException( env, "intToByte: start or len is out of bounds");
+ return NULL;
+ }
+
+ ip = iarr + start;
+
+ blen = ilen * sizeof(jint);
+ rarray = ENVPTR->NewByteArray(ENVPAR blen);
+ if (rarray == NULL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+ h4outOfMemory( env, "intToByte" );
+ return NULL;
+ }
+
+ barray = ENVPTR->GetByteArrayElements(ENVPAR rarray,&bb);
+ if (barray == NULL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+ h4JNIFatalError( env, "intToByte: getByte failed?");
+ return NULL;
+ }
+
+ bap = barray;
+ for (ii = 0; ii < len; ii++) {
+ u.ival = *ip++;
+ for (ij = 0; ij < sizeof(jint); ij++) {
+ *bap = u.bytes[ij];
+ bap++;
+ }
+ }
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR rarray,barray, 0);
+ ENVPTR->ReleaseIntArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+
+ return rarray;
+
+}
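+
+/* Illustrative sketch: the union in intToByte above is used only to view the
+ * jint's storage as raw bytes, so its inner loop is equivalent to
+ *
+ * memcpy(bap, &u.ival, sizeof(jint));
+ * bap += sizeof(jint);
+ *
+ * again in host byte order. The same pattern is repeated for the other
+ * numeric types below. */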
+
+JNIEXPORT jbyteArray JNICALL
+Java_hdf_hdflib_HDFNativeData_shortToByte__II_3S
+(JNIEnv *env, jclass clss, jint start, jint len, jshortArray idata)
+{
+ jshort *ip;
+ jshort *iarr;
+ int ilen;
+ jbyteArray rarray;
+ int blen;
+ jbyte *barray;
+ jbyte *bap;
+ jboolean bb;
+ int ii;
+ int ij;
+ union things {
+ short ival;
+ char bytes[4];
+ } u;
+
+ if (idata == NULL) {
+ h4raiseException( env, "shortToByte: idata is NULL?");
+ return NULL;
+ }
+ iarr = ENVPTR->GetShortArrayElements(ENVPAR idata,&bb);
+ if (iarr == NULL) {
+ h4JNIFatalError( env, "shortToByte: getShort failed?");
+ return NULL;
+ }
+
+ ilen = ENVPTR->GetArrayLength(ENVPAR idata);
+ if ((start < 0) || (((start + len)) > ilen)) {
+ ENVPTR->ReleaseShortArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+ h4raiseException( env, "shortToByte: start or len is out of bounds");
+ return NULL;
+ }
+
+ ip = iarr + start;
+
+ blen = ilen * sizeof(jshort);
+ rarray = ENVPTR->NewByteArray(ENVPAR blen);
+ if (rarray == NULL) {
+ ENVPTR->ReleaseShortArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+ h4outOfMemory( env, "shortToByte" );
+ return NULL;
+ }
+
+ barray = ENVPTR->GetByteArrayElements(ENVPAR rarray,&bb);
+ if (barray == NULL) {
+ ENVPTR->ReleaseShortArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+ h4JNIFatalError( env, "shortToByte: getByte failed?");
+ return NULL;
+ }
+
+ bap = barray;
+ for (ii = 0; ii < len; ii++) {
+ u.ival = *ip++;
+ for (ij = 0; ij < sizeof(jshort); ij++) {
+ *bap = u.bytes[ij];
+ bap++;
+ }
+ }
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR rarray,barray, 0);
+ ENVPTR->ReleaseShortArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+
+ return rarray;
+
+}
+
+JNIEXPORT jbyteArray JNICALL
+Java_hdf_hdflib_HDFNativeData_floatToByte__II_3F
+(JNIEnv *env, jclass clss, jint start, jint len, jfloatArray idata)
+{
+ jfloat *ip;
+ jfloat *iarr;
+ int ilen;
+ jbyteArray rarray;
+ int blen;
+ jbyte *barray;
+ jbyte *bap;
+ jboolean bb;
+ int ii;
+ int ij;
+ union things {
+ float ival;
+ char bytes[4];
+ } u;
+
+ if (idata == NULL) {
+ h4raiseException( env, "floatToByte: idata is NULL?");
+ return NULL;
+ }
+ iarr = ENVPTR->GetFloatArrayElements(ENVPAR idata,&bb);
+ if (iarr == NULL) {
+ h4JNIFatalError( env, "floatToByte: getFloat failed?");
+ return NULL;
+ }
+
+ ilen = ENVPTR->GetArrayLength(ENVPAR idata);
+ if ((start < 0) || (((start + len)) > ilen)) {
+ ENVPTR->ReleaseFloatArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+ h4raiseException( env, "floatToByte: start or len is out of bounds");
+ return NULL;
+ }
+
+ ip = iarr + start;
+
+ blen = ilen * sizeof(jfloat);
+ rarray = ENVPTR->NewByteArray(ENVPAR blen);
+ if (rarray == NULL) {
+ ENVPTR->ReleaseFloatArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+ h4outOfMemory( env, "floatToByte" );
+ return NULL;
+ }
+
+ barray = ENVPTR->GetByteArrayElements(ENVPAR rarray,&bb);
+ if (barray == NULL) {
+ ENVPTR->ReleaseFloatArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+ h4JNIFatalError( env, "floatToByte: getByte failed?");
+ return NULL;
+ }
+
+ bap = barray;
+ for (ii = 0; ii < len; ii++) {
+ u.ival = *ip++;
+ for (ij = 0; ij < sizeof(jfloat); ij++) {
+ *bap = u.bytes[ij];
+ bap++;
+ }
+ }
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR rarray,barray, 0);
+ ENVPTR->ReleaseFloatArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+
+ return rarray;
+
+}
+
+JNIEXPORT jbyteArray JNICALL
+Java_hdf_hdflib_HDFNativeData_doubleToByte__II_3D
+(JNIEnv *env, jclass clss, jint start, jint len, jdoubleArray idata)
+{
+ jdouble *ip;
+ jdouble *iarr;
+ int ilen;
+ jbyteArray rarray;
+ int blen;
+ jbyte *barray;
+ jbyte *bap;
+ jboolean bb;
+ int ii;
+ int ij;
+ union things {
+ double ival;
+ char bytes[8];
+ } u;
+
+ if (idata == NULL) {
+ h4raiseException( env, "doubleToByte: idata is NULL?");
+ return NULL;
+ }
+ iarr = ENVPTR->GetDoubleArrayElements(ENVPAR idata,&bb);
+ if (iarr == NULL) {
+ h4JNIFatalError( env, "doubleToByte: getDouble failed?");
+ return NULL;
+ }
+
+ ilen = ENVPTR->GetArrayLength(ENVPAR idata);
+ if ((start < 0) || (((start + len)) > ilen)) {
+ ENVPTR->ReleaseDoubleArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+ h4raiseException( env, "doubleToByte: start or len is out of bounds");
+ return NULL;
+ }
+
+ ip = iarr + start;
+
+ blen = ilen * sizeof(jdouble);
+ rarray = ENVPTR->NewByteArray(ENVPAR blen);
+ if (rarray == NULL) {
+ ENVPTR->ReleaseDoubleArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+ h4outOfMemory( env, "doubleToByte" );
+ return NULL;
+ }
+
+ barray = ENVPTR->GetByteArrayElements(ENVPAR rarray,&bb);
+ if (barray == NULL) {
+ ENVPTR->ReleaseDoubleArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+ h4JNIFatalError( env, "doubleToByte: getByte failed?");
+ return NULL;
+ }
+
+ bap = barray;
+ for (ii = 0; ii < len; ii++) {
+ u.ival = *ip++;
+ for (ij = 0; ij < sizeof(jdouble); ij++) {
+ *bap = u.bytes[ij];
+ bap++;
+ }
+ }
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR rarray,barray, 0);
+ ENVPTR->ReleaseDoubleArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+
+ return rarray;
+
+}
+
+
+JNIEXPORT jbyteArray JNICALL
+Java_hdf_hdflib_HDFNativeData_longToByte__II_3J
+(JNIEnv *env, jclass clss, jint start, jint len, jlongArray idata)
+{
+ jlong *ip;
+ jlong *iarr;
+ int ilen;
+ jbyteArray rarray;
+ int blen;
+ jbyte *barray;
+ jbyte *bap;
+ jboolean bb;
+ int ii;
+ int ij;
+ union things {
+ jlong ival;
+ char bytes[8];
+ } u;
+
+ if (idata == NULL) {
+ h4raiseException( env, "longToByte: idata is NULL?");
+ return NULL;
+ }
+ iarr = ENVPTR->GetLongArrayElements(ENVPAR idata,&bb);
+ if (iarr == NULL) {
+ h4JNIFatalError( env, "longToByte: getLong failed?");
+ return NULL;
+ }
+
+ ilen = ENVPTR->GetArrayLength(ENVPAR idata);
+ if ((start < 0) || (((start + len)) > ilen)) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+ h4raiseException( env, "longToByte: start or len is out of bounds");
+ return NULL;
+ }
+
+ ip = iarr + start;
+
+ blen = ilen * sizeof(jlong);
+ rarray = ENVPTR->NewByteArray(ENVPAR blen);
+ if (rarray == NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+ h4outOfMemory( env, "longToByte" );
+ return NULL;
+ }
+
+ barray = ENVPTR->GetByteArrayElements(ENVPAR rarray,&bb);
+ if (barray == NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+ h4JNIFatalError( env, "longToByte: getByte failed?");
+ return NULL;
+ }
+
+ bap = barray;
+ for (ii = 0; ii < len; ii++) {
+ u.ival = *ip++;
+ for (ij = 0; ij < sizeof(jlong); ij++) {
+ *bap = u.bytes[ij];
+ bap++;
+ }
+ }
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR rarray,barray, 0);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+
+ return rarray;
+
+}
+
+
+JNIEXPORT jbyteArray JNICALL
+Java_hdf_hdflib_HDFNativeData_intToByte__I
+(JNIEnv *env, jclass clss, jint idata)
+{
+ jbyteArray rarray;
+ jbyte *barray;
+ jbyte *bap;
+ int ij;
+ jboolean bb;
+ union things {
+ int ival;
+ char bytes[sizeof(int)];
+ } u;
+
+ rarray = ENVPTR->NewByteArray(ENVPAR sizeof(jint));
+ if (rarray == NULL) {
+ h4outOfMemory( env, "intToByte" );
+ return NULL;
+ }
+
+ barray = ENVPTR->GetByteArrayElements(ENVPAR rarray,&bb);
+ if (barray == NULL) {
+ h4JNIFatalError( env, "intToByte: getByte failed?");
+ return NULL;
+ }
+
+ bap = barray;
+ u.ival = idata;
+ for (ij = 0; ij < sizeof(jint); ij++) {
+ *bap = u.bytes[ij];
+ bap++;
+ }
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR rarray,barray, 0);
+ return rarray;
+
+}
+
+JNIEXPORT jbyteArray JNICALL
+Java_hdf_hdflib_HDFNativeData_floatToByte__F
+(JNIEnv *env, jclass clss, jfloat idata)
+{
+ jbyteArray rarray;
+ jbyte *barray;
+ jbyte *bap;
+ jboolean bb;
+ int ij;
+ union things {
+ float ival;
+ char bytes[sizeof(float)];
+ } u;
+
+ rarray = ENVPTR->NewByteArray(ENVPAR sizeof(jfloat));
+ if (rarray == NULL) {
+ h4outOfMemory( env, "floatToByte" );
+ return NULL;
+ }
+
+ barray = ENVPTR->GetByteArrayElements(ENVPAR rarray,&bb);
+ if (barray == NULL) {
+ h4JNIFatalError( env, "floatToByte: getByte failed?");
+ return NULL;
+ }
+
+ bap = barray;
+ u.ival = idata;
+ for (ij = 0; ij < sizeof(jfloat); ij++) {
+ *bap = u.bytes[ij];
+ bap++;
+ }
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR rarray,(jbyte *)barray, 0);
+ return rarray;
+
+}
+
+JNIEXPORT jbyteArray JNICALL
+Java_hdf_hdflib_HDFNativeData_shortToByte__S
+(JNIEnv *env, jclass clss, jshort idata)
+{
+ jbyteArray rarray;
+ jbyte *barray;
+ jbyte *bap;
+ jboolean bb;
+ int ij;
+ union things {
+ short ival;
+ char bytes[sizeof(short)];
+ } u;
+
+ rarray = ENVPTR->NewByteArray(ENVPAR sizeof(jshort));
+ if (rarray == NULL) {
+ h4outOfMemory( env, "shortToByte" );
+ return NULL;
+ }
+
+ barray = ENVPTR->GetByteArrayElements(ENVPAR rarray,&bb);
+ if (barray == NULL) {
+ h4JNIFatalError( env, "shortToByte: getShort failed?");
+ return NULL;
+ }
+
+ bap = barray;
+ u.ival = idata;
+ for (ij = 0; ij < sizeof(jshort); ij++) {
+ *bap = u.bytes[ij];
+ bap++;
+ }
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR rarray,(jbyte *)barray, 0);
+
+ return rarray;
+
+}
+
+
+JNIEXPORT jbyteArray JNICALL
+Java_hdf_hdflib_HDFNativeData_doubleToByte__D
+(JNIEnv *env, jclass clss, jdouble idata)
+{
+ jbyteArray rarray;
+ jbyte *barray;
+ jbyte *bap;
+ jboolean bb;
+ int ij;
+ union things {
+ double ival;
+ char bytes[sizeof(double)];
+ } u;
+
+ rarray = ENVPTR->NewByteArray(ENVPAR sizeof(jdouble));
+ if (rarray == NULL) {
+ h4outOfMemory( env, "doubleToByte" );
+ return NULL;
+ }
+
+ barray = ENVPTR->GetByteArrayElements(ENVPAR rarray,&bb);
+ if (barray == NULL) {
+ h4JNIFatalError( env, "doubleToByte: getDouble failed?");
+ return NULL;
+ }
+
+ bap = barray;
+ u.ival = idata;
+ for (ij = 0; ij < sizeof(jdouble); ij++) {
+ *bap = u.bytes[ij];
+ bap++;
+ }
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR rarray,(jbyte *)barray, 0);
+
+ return rarray;
+}
+
+
+JNIEXPORT jbyteArray JNICALL
+Java_hdf_hdflib_HDFNativeData_longToByte__J
+(JNIEnv *env, jclass clss, jlong idata)
+{
+ jbyteArray rarray;
+ jbyte *barray;
+ jbyte *bap;
+ jboolean bb;
+ int ij;
+ union things {
+ jlong ival;
+ char bytes[sizeof(jlong)];
+ } u;
+
+ rarray = ENVPTR->NewByteArray(ENVPAR sizeof(jlong));
+ if (rarray == NULL) {
+ h4outOfMemory( env, "longToByte" );
+ return NULL;
+ }
+
+ barray = ENVPTR->GetByteArrayElements(ENVPAR rarray,&bb);
+ if (barray == NULL) {
+ h4JNIFatalError( env, "longToByte: getLong failed?");
+ return NULL;
+ }
+
+ bap = barray;
+ u.ival = idata;
+ for (ij = 0; ij < sizeof(jlong); ij++) {
+ *bap = u.bytes[ij];
+ bap++;
+ }
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR rarray,(jbyte *)barray, 0);
+
+ return rarray;
+
+}
+
+JNIEXPORT jbyteArray JNICALL
+Java_hdf_hdflib_HDFNativeData_byteToByte__B
+(JNIEnv *env, jclass clss, jbyte idata)
+{
+ jbyteArray rarray;
+ jbyte *barray;
+ jbyte *bap;
+ jboolean bb;
+ int ij;
+ union things {
+ jbyte ival;
+ char bytes[sizeof(jbyte)];
+ } u;
+
+ rarray = ENVPTR->NewByteArray(ENVPAR sizeof(jbyte));
+ if (rarray == NULL) {
+ h4outOfMemory( env, "byteToByte" );
+ return NULL;
+ }
+
+ barray = ENVPTR->GetByteArrayElements(ENVPAR rarray,&bb);
+ if (barray == NULL) {
+ h4JNIFatalError( env, "byteToByte: getByte failed?");
+ return NULL;
+ }
+
+ bap = barray;
+ u.ival = idata;
+ for (ij = 0; ij < sizeof(jbyte); ij++) {
+ *bap = u.bytes[ij];
+ bap++;
+ }
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR rarray,(jbyte *)barray, 0);
+
+ return rarray;
+}
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/java/src/jni/hdfr24Imp.c b/java/src/jni/hdfr24Imp.c
new file mode 100644
index 0000000..8443c3e
--- /dev/null
+++ b/java/src/jni/hdfr24Imp.c
@@ -0,0 +1,360 @@
+
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+/*
+ * This code is the C-interface called by Java programs to access the
+ * HDF 4 library.
+ *
+ * Each routine wraps a single HDF entry point, generally with the
+ * analogous arguments and return codes.
+ *
+ */
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "hdf.h"
+#include "jni.h"
+#include "h4jni.h"
+
+extern jboolean
+getOldCompInfo
+(JNIEnv *env, jobject ciobj, comp_info *cinf);
+
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_DF24getdims
+(JNIEnv *env, jclass clss, jstring filename, jintArray argv)
+{
+ intn rval;
+
+ const char *hdf_file;
+ int copyMode;
+ jint *theArgs;
+ jboolean bb;
+
+ copyMode = JNI_ABORT;
+
+ if (argv == NULL) {
+ h4nullArgument(env, "DF24getdims: output array argv is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR argv) < 3) {
+ h4badArgument(env, "DF24getdims: output array argv < order 3");
+ } /* end else if */
+ else {
+ theArgs = ENVPTR->GetIntArrayElements(ENVPAR argv,&bb);
+
+ if (theArgs == NULL) {
+ h4JNIFatalError(env, "DF24getdims: argv not pinned");
+ } /* end if */
+ else {
+ PIN_JAVA_STRING(filename, hdf_file);
+
+ if (hdf_file != NULL) {
+ /* get image dimension information */
+ rval = DF24getdims(hdf_file, (int32 *)&(theArgs[0]),
+ (int32 *)&(theArgs[1]), (intn *)&(theArgs[2]));
+
+ UNPIN_JAVA_STRING(filename, hdf_file);
+
+ if (rval == FAIL) {
+ CALL_ERROR_CHECK();
+ } else {
+ copyMode = 0;
+ }
+ }
+
+ ENVPTR->ReleaseIntArrayElements(ENVPAR argv,theArgs,copyMode);
+ }/* end else */
+ } /* end else */
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_DF24getimage
+(JNIEnv *env, jclass clss, jstring filename, jbyteArray image, jint width, jint height)
+{
+ const char *hdf_file;
+ int copyMode;
+ intn rval;
+ jbyte *dat;
+ jboolean bb;
+
+ copyMode = JNI_ABORT;
+
+ if (image == NULL) {
+ h4nullArgument(env, "DF24getimage: output array image is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR image) < 1) {
+ h4badArgument(env, "DF24getimage: output array image invalid size");
+ } /* end else if */
+ else {
+ dat = (jbyte *)ENVPTR->GetPrimitiveArrayCritical(ENVPAR image,&bb);
+
+ if (dat == NULL) {
+ h4JNIFatalError(env, "DF24getimage: image not pinned");
+ } /* end if */
+ else {
+ PIN_JAVA_STRING(filename, hdf_file);
+
+ if (hdf_file != NULL) {
+ rval = DF24getimage(hdf_file, (VOIDP) dat, (int32) width, (int32) height);
+
+ UNPIN_JAVA_STRING(filename, hdf_file);
+
+ if (rval == FAIL) {
+ CALL_ERROR_CHECK();
+ } else {
+ copyMode = 0;
+ }
+ }
+
+ ENVPTR->ReleasePrimitiveArrayCritical(ENVPAR image,dat,copyMode);
+ } /* end else */
+ } /* end else */
+
+ return JNI_TRUE;
+}
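+
+/* Illustrative sketch (plain C, mirroring how the two wrappers above are
+ * normally paired): DF24 rasters store 3 bytes per pixel, so the buffer
+ * passed to DF24getimage() needs width*height*3 bytes. The file name is a
+ * placeholder:
+ *
+ * int32 width, height;
+ * intn il;
+ * if (DF24getdims("example.hdf", &width, &height, &il) != FAIL) {
+ * uint8 *buf = (uint8 *)malloc((size_t)width * height * 3);
+ * if (buf != NULL) {
+ * DF24getimage("example.hdf", (VOIDP)buf, width, height);
+ * free(buf);
+ * }
+ * }
+ */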
+
+JNIEXPORT jshort JNICALL
+Java_hdf_hdflib_HDFLibrary_DF24lastref
+(JNIEnv *env, jclass clss)
+{
+ return ((short)DF24lastref());
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_DF24readref
+(JNIEnv *env, jclass clss, jstring filename, jshort ref)
+{
+ int retVal;
+ const char *filePtr;
+
+ PIN_JAVA_STRING(filename, filePtr);
+
+ if (filePtr != NULL) {
+ retVal = DF24readref(filePtr, (short)ref);
+
+ UNPIN_JAVA_STRING(filename, filePtr);
+
+ if (retVal == FAIL) {
+ CALL_ERROR_CHECK();
+ }
+ }
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_DF24restart
+(JNIEnv *env, jclass clss)
+{
+ int retVal;
+ retVal = DF24restart();
+
+ if (retVal) {
+ CALL_ERROR_CHECK();
+ }
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_DF24nimages
+(JNIEnv *env, jclass clss, jstring hdfFile)
+{
+ const char *hdf_file;
+ intn retVal;
+
+ PIN_JAVA_STRING(hdfFile, hdf_file);
+
+ if (hdf_file != NULL) {
+ retVal = DF24nimages(hdf_file);
+
+ UNPIN_JAVA_STRING(hdfFile, hdf_file);
+
+ if (retVal == FAIL)
+ CALL_ERROR_CHECK();
+ }
+
+ return (jint)retVal;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_DF24reqil
+(JNIEnv *env, jclass clss, jint interlace)
+{
+ intn retVal;
+
+ retVal = DF24reqil((intn)interlace);
+
+ if (retVal == FAIL)
+ CALL_ERROR_CHECK();
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_DF24addimage
+(JNIEnv *env, jclass clss, jstring filename, jbyteArray image, jint width, jint height)
+{
+ intn rval;
+ const char *f;
+ int copyMode;
+ jbyte *dat;
+ jboolean bb;
+
+ copyMode = JNI_ABORT;
+
+ if (image == NULL) {
+ h4nullArgument(env, "DF24addimage: image is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR image) < 1) {
+ h4badArgument(env, "DF24addimage: no image data");
+ } /* end else if */
+ else {
+ dat = ENVPTR->GetByteArrayElements(ENVPAR image,&bb);
+
+ if (dat == NULL) {
+ h4JNIFatalError(env, "DF24addimage: image not pinned");
+ } /* end if */
+ else {
+ PIN_JAVA_STRING(filename, f);
+
+ if (f != NULL) {
+ rval = DF24addimage((char *)f, (VOIDP) dat, (int32) width, (int32) height);
+
+ UNPIN_JAVA_STRING(filename, f);
+
+ if (rval == FAIL) {
+ CALL_ERROR_CHECK();
+ } else {
+ copyMode = 0;
+ }
+ }
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR image,dat,copyMode);
+ } /* end else */
+ } /* end else */
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_DF24putimage
+(JNIEnv *env, jclass clss, jstring filename, jbyteArray image, jint width, jint height)
+{
+ intn rval;
+ const char *f;
+ int copyMode;
+ jbyte *dat;
+ jboolean bb;
+
+ copyMode = JNI_ABORT;
+
+ if (image == NULL) {
+ h4nullArgument(env, "DF24putimage: image is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR image) < 1) {
+ h4badArgument(env, "DF24putimage: no image data");
+ } /* end else if */
+ else {
+ dat = ENVPTR->GetByteArrayElements(ENVPAR image,&bb);
+
+ if (dat == NULL) {
+ h4JNIFatalError(env, "DF24putimage: image not pinned");
+ } /* end if */
+ else {
+ PIN_JAVA_STRING(filename, f);
+
+ if (f != NULL) {
+ rval = DF24putimage(f, (VOIDP) dat, (int32) width, (int32) height);
+
+ UNPIN_JAVA_STRING(filename, f);
+
+ if (rval == FAIL) {
+ CALL_ERROR_CHECK();
+ } else {
+ copyMode = 0;
+ }
+ }
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR image,dat,copyMode);
+ }
+ } /* end else */
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_DF24setcompress
+(JNIEnv *env, jclass clss, jint type, jobject cinfo)
+{
+ intn rval;
+ comp_info cinf;
+ jboolean bval;
+
+ if (cinfo == NULL) {
+ h4nullArgument(env, "DF24setcompress: cinfo is NULL");
+ } /* end if */
+ else {
+ bval = getOldCompInfo(env, cinfo,&cinf);
+
+ if (bval == JNI_FALSE) {
+ h4JNIFatalError(env, "DF24setcompress: cinfo not pinned");
+ } /* end if */
+ else {
+ /* fill in cinf depending on the value of 'type' */
+ rval = DF24setcompress((int32) type, (comp_info *)&cinf);
+
+ if (rval == FAIL) {
+ CALL_ERROR_CHECK();
+ }
+ }
+ } /* end else */
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_DF24setdims
+(JNIEnv *env, jclass clss, jint width, jint height)
+{
+ intn rval;
+
+ rval = DF24setdims((int32) width, (int32) height);
+
+ if (rval == FAIL) {
+ CALL_ERROR_CHECK();
+ }
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_DF24setil
+(JNIEnv *env, jclass clss, jint il)
+{
+ intn rval;
+ rval = DF24setil((intn) il);
+ if (rval == FAIL) {
+ CALL_ERROR_CHECK();
+ }
+
+ return JNI_TRUE;
+}
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/java/src/jni/hdfr8Imp.c b/java/src/jni/hdfr8Imp.c
new file mode 100644
index 0000000..68d7c4d
--- /dev/null
+++ b/java/src/jni/hdfr8Imp.c
@@ -0,0 +1,426 @@
+
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+/*
+ * This code is the C-interface called by Java programs to access the
+ * HDF 4 library.
+ *
+ * Each routine wraps a single HDF entry point, generally with the
+ * analogous arguments and return codes.
+ *
+ */
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+#include "hdf.h"
+#include "jni.h"
+#include "h4jni.h"
+
+extern jboolean
+getOldCompInfo
+(JNIEnv *env, jobject ciobj, comp_info *cinf);
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_DFR8getdims
+(JNIEnv *env, jclass clss, jstring filename, jintArray argv, jbooleanArray isp)
+{
+ intn rval;
+ const char *hdf_file;
+ int ispal;
+ int copyMode;
+ jint *theArgs;
+ jboolean *theB;
+ jboolean bb;
+
+ copyMode = JNI_ABORT;
+
+ if (argv == NULL) {
+ h4nullArgument(env, "DFR8getdims: output array argv is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR argv) < 2) {
+ h4badArgument(env, "DFR8getdims: argv output array < order 2");
+ } /* end else if */
+ else {
+ theArgs = ENVPTR->GetIntArrayElements(ENVPAR argv,&bb);
+
+ if (theArgs == NULL) {
+ h4JNIFatalError(env, "DFR8getdims: argv not pinned");
+ } /* end if */
+ else {
+ if (isp == NULL) {
+ h4nullArgument(env, "DFR8getdims: output array isp is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR isp) < 1) {
+ h4badArgument(env, "DFR8getdims: output array isp < order 1");
+ } /* end else if */
+ else {
+ theB = ENVPTR->GetBooleanArrayElements(ENVPAR isp,&bb);
+
+ if (theB == NULL) {
+ h4JNIFatalError(env, "DFR8getdims: isp not pinned");
+ } /* end if */
+ else {
+ PIN_JAVA_STRING(filename, hdf_file);
+
+ if (hdf_file != NULL) {
+ /* get image dimension information */
+ rval = DFR8getdims(hdf_file, (int32 *)&(theArgs[0]),
+ (int32 *)&(theArgs[1]), (intn *)&ispal);
+
+ UNPIN_JAVA_STRING(filename, hdf_file);
+
+ if (rval == FAIL) {
+ theB[0] = JNI_FALSE;
+
+ CALL_ERROR_CHECK();
+ }
+ else {
+ copyMode = 0;
+
+ if (ispal) {
+ theB[0] = JNI_TRUE;
+ }
+ else {
+ theB[0] = JNI_FALSE;
+ }
+ } /* end else */
+ }
+
+ ENVPTR->ReleaseBooleanArrayElements(ENVPAR isp,theB,copyMode);
+ } /* end else */
+ } /* end else */
+
+ ENVPTR->ReleaseIntArrayElements(ENVPAR argv,theArgs,copyMode);
+ } /* end else */
+ } /* end else */
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_DFR8getimage
+(JNIEnv *env, jclass clss, jstring filename, jbyteArray image, jint width, jint height, jbyteArray pallete)
+{
+ const char *hdf_file;
+ int copyMode;
+ intn rval;
+ jbyte *dat;
+ jbyte *p;
+ jboolean bb;
+
+ copyMode = JNI_ABORT;
+
+ if (image == NULL) {
+ h4nullArgument(env, "DFR8getimage: output array image is NULL");
+ } /* end if */
+ else {
+ dat = (jbyte *)ENVPTR->GetPrimitiveArrayCritical(ENVPAR image,&bb);
+
+ if (dat == NULL) {
+ h4JNIFatalError(env, "DFR8getimage: image not pinned");
+ } /* end if */
+ else {
+ PIN_JAVA_STRING(filename, hdf_file);
+
+ if (hdf_file != NULL) {
+ if (pallete == NULL) {
+ rval = DFR8getimage((char *)hdf_file, (uint8 *) dat, (int32) width, (int32) height,
+ (uint8 *)NULL);
+ }
+ else {
+ p = ENVPTR->GetByteArrayElements(ENVPAR pallete,&bb);
+ rval = DFR8getimage((char *)hdf_file, (uint8 *) dat, (int32) width, (int32) height,
+ (uint8 *)p);
+ }
+
+ UNPIN_JAVA_STRING(filename, hdf_file);
+
+ if (rval == FAIL) {
+ CALL_ERROR_CHECK();
+ }
+ else {
+ copyMode = 0;
+ } /* end else */
+
+ ENVPTR->ReleasePrimitiveArrayCritical(ENVPAR image,dat,copyMode);
+ if (pallete != NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR pallete,p,copyMode);
+ }
+ }
+ } /* end else */
+ } /* end else */
+
+ return JNI_TRUE;
+}
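+
+/* Illustrative sketch (plain C): DFR8 rasters are 8-bit, one byte per pixel,
+ * and the optional palette is 768 bytes (256 RGB triples). The file name is
+ * a placeholder:
+ *
+ * int32 width, height;
+ * intn haspal;
+ * uint8 pal[768];
+ * if (DFR8getdims("example.hdf", &width, &height, &haspal) != FAIL) {
+ * uint8 *img = (uint8 *)malloc((size_t)width * height);
+ * if (img != NULL) {
+ * DFR8getimage("example.hdf", img, width, height, haspal ? pal : NULL);
+ * free(img);
+ * }
+ * }
+ */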
+
+JNIEXPORT jshort JNICALL
+Java_hdf_hdflib_HDFLibrary_DFR8lastref
+(JNIEnv *env, jclass clss)
+{
+ return ((short)DFR8lastref());
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_DFR8readref
+(JNIEnv *env, jclass clss, jstring filename, jshort ref)
+{
+ int retVal;
+ const char *filePtr;
+
+ PIN_JAVA_STRING(filename, filePtr);
+
+ if (filePtr != NULL) {
+ retVal = DFR8readref(filePtr, (short)ref);
+
+ UNPIN_JAVA_STRING(filename, filePtr);
+
+ if (retVal == FAIL) {
+ CALL_ERROR_CHECK();
+ }
+ }
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_DFR8restart
+(JNIEnv *env, jclass clss)
+{
+ int retVal;
+ retVal = DFR8restart();
+
+ if (retVal) {
+ CALL_ERROR_CHECK();
+ }
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_DFR8nimages
+(JNIEnv *env, jclass clss, jstring hdfFile)
+{
+ const char *hdf_file;
+ int retVal;
+
+ PIN_JAVA_STRING(hdfFile, hdf_file);
+
+ if (hdf_file != NULL) {
+ retVal = DFR8nimages(hdf_file);
+
+ UNPIN_JAVA_STRING(hdfFile, hdf_file);
+
+ if (retVal == FAIL) {
+ CALL_ERROR_CHECK();
+ }
+ }
+
+ return(retVal);
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_DFR8addimage
+(JNIEnv *env, jclass clss, jstring filename, jbyteArray image, jint width, jint height, jshort compress)
+{
+ intn rval;
+ const char *f;
+ jbyte *dat;
+ jboolean bb;
+
+ if (image == NULL) {
+ h4nullArgument(env, "DFR8addimage: image is NULL");
+ } /* end if */
+ else {
+ dat = ENVPTR->GetByteArrayElements(ENVPAR image,&bb);
+
+ if (dat == NULL) {
+ h4JNIFatalError(env, "DFR8addimage: image not pinned");
+ } /* end if */
+ else {
+ PIN_JAVA_STRING(filename, f);
+
+ if (f != NULL) {
+ rval = DFR8addimage((char *)f, (VOIDP) dat, (int32) width, (int32) height,
+ (uint16)compress);
+
+ UNPIN_JAVA_STRING(filename, f);
+
+ if (rval == FAIL) {
+ CALL_ERROR_CHECK();
+ }
+ }
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR image,dat,JNI_ABORT);
+ } /* end else */
+ } /* end else */
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_DFR8putimage
+(JNIEnv *env, jclass clss, jstring filename, jbyteArray image, jint width, jint height, jint compress)
+{
+ intn rval;
+ const char *f;
+ jbyte *dat;
+ jboolean bb;
+
+ if (image == NULL) {
+ h4nullArgument(env, "DFR8putimage: image is NULL");
+ } /* end if */
+ else {
+ dat = ENVPTR->GetByteArrayElements(ENVPAR image,&bb);
+
+ if (dat == NULL) {
+ h4JNIFatalError(env, "DFR8putimage: image not pinned");
+ } /* end if */
+ else {
+ PIN_JAVA_STRING(filename, f);
+
+ if (f != NULL) {
+ rval = DFR8putimage((char *)f, (VOIDP) dat, (int32) width, (int32) height,
+ (uint16) compress);
+
+ UNPIN_JAVA_STRING(filename, f);
+
+ if (rval == FAIL) {
+ CALL_ERROR_CHECK();
+ }
+ }
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR image,dat,JNI_ABORT);
+ } /* end else */
+ } /* end else */
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_DFR8setcompress
+(JNIEnv *env, jclass clss, jint type, jobject cinfo)
+{
+ intn rval;
+ comp_info cinf;
+
+ if (cinfo == NULL) {
+ h4nullArgument(env, "DFR8setcompress: cinfo is NULL");
+ } /* end if */
+ else {
+ if (getOldCompInfo(env, cinfo,&cinf) == JNI_FALSE) {
+ h4JNIFatalError(env, "DFR8setcompress: cinfo not pinned");
+ } /* end if */
+ else {
+ /* fill in cinf depending on the value of 'type' */
+ rval = DFR8setcompress((int32) type, (comp_info *)&cinf);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+ } /* end else */
+ } /* end else */
+
+ return JNI_TRUE;
+}
+
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_DFR8getpalref
+(JNIEnv *env, jclass clss, jshortArray palref)
+{
+ int rval;
+ short *theArgs;
+ jboolean bb;
+
+ if (palref == NULL) {
+ h4nullArgument(env, "DFR8getpalref: output array palref is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR palref) < 1) {
+ h4badArgument(env, "DFR8getpalref: output array palref < order 1");
+ } /* end else if */
+ else {
+ theArgs = ENVPTR->GetShortArrayElements(ENVPAR palref,&bb);
+
+ if (theArgs == NULL) {
+ h4JNIFatalError(env, "DFR8getpalref: palref not pinned");
+ } /* end if */
+ else {
+ rval = DFR8getpalref((uint16 *)&(theArgs[0]));
+
+ if (rval == FAIL) {
+ ENVPTR->ReleaseShortArrayElements(ENVPAR palref,theArgs,JNI_ABORT);
+ CALL_ERROR_CHECK();
+ }
+ else {
+ ENVPTR->ReleaseShortArrayElements(ENVPAR palref,theArgs,0);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return JNI_TRUE;
+}
+
+
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_DFR8setpalette
+(JNIEnv *env, jclass clss, jbyteArray palette)
+{
+ int rval;
+ jbyte *p;
+ jboolean bb;
+
+ if (palette == NULL) {
+ p = NULL;
+ }
+ else {
+ p = ENVPTR->GetByteArrayElements(ENVPAR palette,&bb);
+ }
+
+ rval = DFR8setpalette((uint8 *)p);
+
+ if (p) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR palette,p,JNI_ABORT);
+ }
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_DFR8writeref
+(JNIEnv *env, jclass clss, jstring filename, jshort ref)
+{
+ int retVal;
+ const char *filePtr;
+
+ PIN_JAVA_STRING(filename, filePtr);
+
+ if (filePtr != NULL) {
+ retVal = DFR8writeref(filePtr, (short)ref);
+
+ UNPIN_JAVA_STRING(filename, filePtr);
+
+ if (retVal == FAIL)
+ CALL_ERROR_CHECK();
+ }
+
+ return JNI_TRUE;
+}
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/java/src/jni/hdfsdsImp.c b/java/src/jni/hdfsdsImp.c
new file mode 100644
index 0000000..e3a8e25
--- /dev/null
+++ b/java/src/jni/hdfsdsImp.c
@@ -0,0 +1,1980 @@
+
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+/*
+ * This code is the C-interface called by Java programs to access the
+ * HDF 4 library.
+ *
+ * Each routine wraps a single HDF entry point, generally with the
+ * analogous arguments and return codes.
+ *
+ */
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "hdf.h"
+#include "mfhdf.h"
+#include "jni.h"
+#include "h4jni.h"
+
+extern jboolean makeChunkInfo(JNIEnv *env, jobject chunkobj, int32 flgs, HDF_CHUNK_DEF *cinf);
+extern jboolean getNewCompInfo(JNIEnv *env, jobject ciobj, comp_info *cinf);
+extern jboolean setNewCompInfo(JNIEnv *env, jobject ciobj, comp_coder_t coder, comp_info *cinf);
+extern jboolean getChunkInfo(JNIEnv *env, jobject chunkobj, HDF_CHUNK_DEF *cinf);
+
+JNIEXPORT jlong JNICALL
+Java_hdf_hdflib_HDFLibrary_SDstart
+(JNIEnv *env, jclass clss, jstring filename, jint access)
+{
+ int32 sdid;
+ const char *fname;
+
+ PIN_JAVA_STRING(filename, fname);
+ if (fname != NULL) {
+ sdid = SDstart(fname, (int32)access);
+
+ UNPIN_JAVA_STRING(filename, fname);
+ if (sdid < 0)
+ CALL_ERROR_CHECK();
+ }
+
+ return (jlong)sdid;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDend
+(JNIEnv *env, jclass clss, jlong sdid)
+{
+ intn rval;
+ int32 id = (int32)sdid;
+
+ rval = SDend(id);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ return JNI_TRUE;
+}
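+
+/* Illustrative sketch (plain C): the usual life cycle of the identifiers that
+ * SDstart/SDend above hand back to Java. The file name is a placeholder:
+ *
+ * int32 sd_id = SDstart("example.hdf", DFACC_READ);
+ * if (sd_id != FAIL) {
+ * int32 sds_id = SDselect(sd_id, 0);
+ * if (sds_id != FAIL)
+ * SDendaccess(sds_id);
+ * SDend(sd_id);
+ * }
+ */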
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_SDfileinfo
+(JNIEnv *env, jclass clss, jlong sdid, jintArray argv)
+{
+ intn rval;
+ jint *theArgs;
+ jboolean isCopy; /* dummy */
+ int32 id = (int32)sdid;
+
+ if (argv == NULL) {
+ h4nullArgument(env, "SDfileinfo: argv is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR argv) < 2) {
+ h4badArgument(env, "SDfileinfo: argv input array < order 2");
+ } /* end else if */
+ else {
+ theArgs = ENVPTR->GetIntArrayElements(ENVPAR argv, &isCopy);
+ if (theArgs == NULL) {
+ h4JNIFatalError(env, "SDfileinfo: argv not pinned");
+ } /* end if */
+ else {
+ rval = SDfileinfo(id, (int32 *)&(theArgs[0]), (int32 *)&(theArgs[1]));
+
+ if (rval == FAIL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR argv, theArgs, JNI_ABORT);
+ CALL_ERROR_CHECK();
+ }
+ else {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR argv, theArgs, 0);
+ }
+ } /* end else */
+ } /* end else */
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jlong JNICALL
+Java_hdf_hdflib_HDFLibrary_SDselect
+(JNIEnv *env, jclass clss, jlong sdid, jint index)
+{
+ int32 rval;
+ int32 id = (int32)sdid;
+
+ rval = SDselect(id, (int32)index);
+ if (rval < 0)
+ CALL_ERROR_CHECK();
+
+ return (jlong)rval;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_SDnametoindex
+(JNIEnv *env, jclass clss, jlong sdid, jstring name)
+{
+ intn rval;
+ const char *str;
+ int32 id = (int32)sdid;
+
+ PIN_JAVA_STRING(name, str);
+ if (str != NULL) {
+ rval = SDnametoindex(id, str);
+
+ UNPIN_JAVA_STRING(name, str);
+ if (rval < 0)
+ CALL_ERROR_CHECK();
+ }
+
+ return (jint)rval;
+}
+
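+/*
+ * SDgetinfo wrapper: name[0] receives the dataset name, dimsizes the
+ * dimension sizes, and, per the SDgetinfo call below, argv[0] the rank,
+ * argv[1] the number type, and argv[2] the number of attributes.
+ */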
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDgetinfo
+(JNIEnv *env, jclass clss, jlong sdsid, jobjectArray name, jintArray dimsizes, jintArray argv)
+{
+ intn rval;
+ int32 *dims;
+ int32 *theArgs;
+ jclass jc;
+ jstring str;
+ jboolean isCopy;
+ jboolean bb = 0;
+ jboolean abb = 0;
+ jobject o;
+ char *cname;
+ int32 id = (int32)sdsid;
+
+ /* buffer for the dataset name returned by SDgetinfo */
+
+ cname = (char *)HDmalloc(MAX_NC_NAME+1);
+ if (cname == NULL) {
+ h4outOfMemory(env, "SDgetinfo");
+ }
+ else {
+ if (name == NULL) {
+ h4nullArgument(env, "SDgetinfo: name is NULL");
+ } /* end if */
+ else if (dimsizes == NULL) {
+ h4nullArgument(env, "SDgetinfo: dimsizes is NULL");
+ } /* end if */
+ else if (argv == NULL) {
+ h4nullArgument(env, "SDgetinfo: argv is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR argv) < 3) {
+ h4badArgument(env, "SDgetinfo: argv input array < order 3");
+ } /* end else if */
+ else {
+ dims = (int32 *)ENVPTR->GetIntArrayElements(ENVPAR dimsizes, &isCopy);
+ if (dims == NULL) {
+ h4JNIFatalError(env, "SDgetinfo: dimsizes not pinned");
+ } /* end if */
+ else {
+ theArgs = (int32 *)ENVPTR->GetIntArrayElements(ENVPAR argv, &isCopy);
+ if (theArgs == NULL) {
+ h4JNIFatalError(env, "SDgetinfo: argv not pinned");
+ } /* end if */
+ else {
+ rval = SDgetinfo(id, cname, &(theArgs[0]), dims,
+ &(theArgs[1]), &(theArgs[2]));
+
+ if (rval == FAIL) {
+ abb = JNI_ABORT;
+ bb = JNI_ABORT;
+ CALL_ERROR_CHECK();
+ }
+ else {
+ cname[MAX_NC_NAME] = '\0';
+ str = ENVPTR->NewStringUTF(ENVPAR cname);
+ if (str != NULL) {
+ o = ENVPTR->GetObjectArrayElement(ENVPAR name, 0);
+ if (o == NULL) {
+ bb = JNI_ABORT;
+ CALL_ERROR_CHECK();
+ }
+ else {
+ jc = ENVPTR->FindClass(ENVPAR "java/lang/String");
+ if (jc == NULL) {
+ bb = JNI_ABORT;
+ CALL_ERROR_CHECK();
+ }
+ else {
+ if (ENVPTR->IsInstanceOf(ENVPAR o, jc) == JNI_FALSE) {
+ bb = JNI_ABORT;
+ CALL_ERROR_CHECK();
+ }
+ ENVPTR->SetObjectArrayElement(ENVPAR name, 0, (jobject)str);
+ }
+ ENVPTR->DeleteLocalRef(ENVPAR o);
+ }
+ }
+ }
+ ENVPTR->ReleaseIntArrayElements(ENVPAR argv, theArgs, abb);
+
+ } /* end else */
+ ENVPTR->ReleaseIntArrayElements(ENVPAR dimsizes, dims, bb);
+
+ } /* end else */
+ } /* end else */
+
+ HDfree(cname);
+ }
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDreaddata
+(JNIEnv *env, jclass clss, jlong sdsid, jintArray start, jintArray stride, jintArray count, jbyteArray data)
+{
+ intn rval;
+ int32 *strt;
+ int32 *strd;
+ int32 *cnt;
+ jbyte *d;
+ jboolean bb;
+ jboolean cbb = 0;
+ int32 id = (int32)sdsid;
+
+ if (data == NULL) {
+ h4nullArgument(env, "SDreaddata: data is NULL");
+ } /* end if */
+ else if (start == NULL) {
+ h4nullArgument(env, "SDreaddata: start is NULL");
+ } /* end if */
+ else if (count == NULL) {
+ h4nullArgument(env, "SDreaddata: count is NULL");
+ } /* end if */
+ else {
+ d = (jbyte *)ENVPTR->GetPrimitiveArrayCritical(ENVPAR data, &bb);
+
+ strt = ENVPTR->GetIntArrayElements(ENVPAR start, &bb);
+ if (strt == NULL) {
+ h4JNIFatalError(env, "SDreaddata: start not pinned");
+ } /* end if */
+ else {
+ cnt = ENVPTR->GetIntArrayElements(ENVPAR count, &bb);
+ if (cnt == NULL) {
+ h4JNIFatalError(env, "SDreaddata: count not pinned");
+ } /* end if */
+ else {
+ if (stride == NULL) {
+ strd = NULL;
+ }
+ else {
+ strd = ENVPTR->GetIntArrayElements(ENVPAR stride, &bb);
+ }
+ rval = SDreaddata(id, strt, strd, cnt, d);
+
+ if (stride != NULL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR stride, strd, JNI_ABORT);
+ }
+ ENVPTR->ReleaseIntArrayElements(ENVPAR count, cnt, JNI_ABORT);
+
+ if (rval == FAIL) {
+ cbb = JNI_ABORT;
+ CALL_ERROR_CHECK();
+ }
+ } /* end else */
+ ENVPTR->ReleaseIntArrayElements(ENVPAR start, strt, JNI_ABORT);
+ }
+ ENVPTR->ReleasePrimitiveArrayCritical(ENVPAR data, d, cbb);
+ } /* end else */
+ return JNI_TRUE;
+}
+
+/*
+ ////////////////////////////////////////////////////////////////////
+ // //
+ // New APIs for read data from library //
+ // Using SDreaddata(..., Object buf) requires function calls //
+ // theArray.emptyBytes() and theArray.arrayify(buf), which //
+ // triples the actual memory needed by the data set. //
+ // Using the following APIs solves the problem. //
+ // //
+ ////////////////////////////////////////////////////////////////////
+*/
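+/*
+ * Hypothetical Java-side use of the typed variants (wrapper names as bound
+ * below; the surrounding Java code is only a sketch):
+ *
+ *     short[] buf = new short[count[0] * count[1]];
+ *     HDFLibrary.SDreaddata_short(sdsid, start, null, count, buf);
+ *
+ * The data lands directly in the pinned short[] storage, with no
+ * intermediate byte[] copy or arrayify() pass.
+ */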
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDreaddata_1short
+(JNIEnv *env, jclass clss, jlong sdsid, jintArray start, jintArray stride, jintArray count, jshortArray data)
+{
+ intn rval;
+ int32 *strt;
+ int32 *strd;
+ int32 *cnt;
+ jshort *d;
+ jboolean bb;
+ jboolean cbb = 0;
+ int32 id = (int32)sdsid;
+
+ if (data == NULL) {
+ h4nullArgument(env, "SDreaddata: data is NULL");
+ } /* end if */
+ else if (start == NULL) {
+ h4nullArgument(env, "SDreaddata: start is NULL");
+ } /* end if */
+ else if (count == NULL) {
+ h4nullArgument(env, "SDreaddata: count is NULL");
+ } /* end if */
+ else {
+ d = (jshort *)ENVPTR->GetPrimitiveArrayCritical(ENVPAR data, &bb);
+
+ strt = ENVPTR->GetIntArrayElements(ENVPAR start, &bb);
+ if (strt == NULL) {
+ h4JNIFatalError(env, "SDreaddata: start not pinned");
+ } /* end if */
+ else {
+ cnt = ENVPTR->GetIntArrayElements(ENVPAR count, &bb);
+ if (cnt == NULL) {
+ h4JNIFatalError(env, "SDreaddata: count not pinned");
+ } /* end if */
+ else {
+ if (stride == NULL) {
+ strd = NULL;
+ }
+ else {
+ strd = ENVPTR->GetIntArrayElements(ENVPAR stride, &bb);
+ }
+ rval = SDreaddata(id, strt, strd, cnt, d);
+
+ if (stride != NULL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR stride, strd, JNI_ABORT);
+ }
+ ENVPTR->ReleaseIntArrayElements(ENVPAR count, cnt, JNI_ABORT);
+
+ if (rval == FAIL) {
+ cbb = JNI_ABORT;
+ CALL_ERROR_CHECK();
+ }
+ } /* end else */
+ ENVPTR->ReleaseIntArrayElements(ENVPAR start, strt, JNI_ABORT);
+ }
+ ENVPTR->ReleasePrimitiveArrayCritical(ENVPAR data, d, cbb);
+ } /* end else */
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDreaddata_1int
+(JNIEnv *env, jclass clss, jlong sdsid, jintArray start, jintArray stride, jintArray count, jintArray data)
+{
+ intn rval;
+ int32 *strt;
+ int32 *strd;
+ int32 *cnt;
+ jint *d;
+ jboolean bb;
+ jboolean cbb = 0;
+ int32 id = (int32)sdsid;
+
+ if (data == NULL) {
+ h4nullArgument(env, "SDreaddata: data is NULL");
+ } /* end if */
+ else if (start == NULL) {
+ h4nullArgument(env, "SDreaddata: start is NULL");
+ } /* end if */
+ else if (count == NULL) {
+ h4nullArgument(env, "SDreaddata: count is NULL");
+ } /* end if */
+ else {
+ d = (jint *)ENVPTR->GetPrimitiveArrayCritical(ENVPAR data, &bb);
+
+ strt = ENVPTR->GetIntArrayElements(ENVPAR start, &bb);
+ if (strt == NULL) {
+ h4JNIFatalError(env, "SDreaddata: start not pinned");
+ } /* end if */
+ else {
+ cnt = ENVPTR->GetIntArrayElements(ENVPAR count, &bb);
+ if (cnt == NULL) {
+ h4JNIFatalError(env, "SDreaddata: count not pinned");
+ } /* end if */
+ else {
+ if (stride == NULL) {
+ strd = NULL;
+ }
+ else {
+ strd = ENVPTR->GetIntArrayElements(ENVPAR stride, &bb);
+ }
+ rval = SDreaddata(id, strt, strd, cnt, d);
+
+ if (stride != NULL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR stride, strd, JNI_ABORT);
+ }
+ ENVPTR->ReleaseIntArrayElements(ENVPAR count, cnt, JNI_ABORT);
+
+ if (rval == FAIL) {
+ cbb = JNI_ABORT;
+ CALL_ERROR_CHECK();
+ }
+ } /* end else */
+ ENVPTR->ReleaseIntArrayElements(ENVPAR start, strt, JNI_ABORT);
+ }
+ ENVPTR->ReleasePrimitiveArrayCritical(ENVPAR data, d, cbb);
+ } /* end else */
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDreaddata_1long
+(JNIEnv *env, jclass clss, jlong sdsid, jintArray start, jintArray stride, jintArray count, jlongArray data)
+{
+ intn rval;
+ int32 *strt;
+ int32 *strd;
+ int32 *cnt;
+ jlong *d;
+ jboolean bb;
+ jboolean cbb = 0;
+ int32 id = (int32)sdsid;
+
+ if (data == NULL) {
+ h4nullArgument(env, "SDreaddata: data is NULL");
+ } /* end if */
+ else if (start == NULL) {
+ h4nullArgument(env, "SDreaddata: start is NULL");
+ } /* end if */
+ else if (count == NULL) {
+ h4nullArgument(env, "SDreaddata: count is NULL");
+ } /* end if */
+ else {
+ d = (jlong *)ENVPTR->GetPrimitiveArrayCritical(ENVPAR data, &bb);
+
+ strt = ENVPTR->GetIntArrayElements(ENVPAR start, &bb);
+ if (strt == NULL) {
+ h4JNIFatalError(env, "SDreaddata: start not pinned");
+ } /* end if */
+ else {
+ cnt = ENVPTR->GetIntArrayElements(ENVPAR count, &bb);
+ if (cnt == NULL) {
+ h4JNIFatalError(env, "SDreaddata: count not pinned");
+ } /* end if */
+ else {
+ if (stride == NULL) {
+ strd = NULL;
+ }
+ else {
+ strd = ENVPTR->GetIntArrayElements(ENVPAR stride, &bb);
+ }
+ rval = SDreaddata(id, strt, strd, cnt, d);
+
+ if (stride != NULL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR stride, strd, JNI_ABORT);
+ }
+ ENVPTR->ReleaseIntArrayElements(ENVPAR count, cnt, JNI_ABORT);
+
+ if (rval == FAIL) {
+ cbb = JNI_ABORT;
+ CALL_ERROR_CHECK();
+ }
+ } /* end else */
+ ENVPTR->ReleaseIntArrayElements(ENVPAR start, strt, JNI_ABORT);
+ }
+ ENVPTR->ReleasePrimitiveArrayCritical(ENVPAR data, d, cbb);
+ } /* end else */
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDreaddata_1float
+(JNIEnv *env, jclass clss, jlong sdsid, jintArray start, jintArray stride, jintArray count, jfloatArray data)
+{
+ intn rval;
+ int32 *strt;
+ int32 *strd;
+ int32 *cnt;
+ jfloat *d;
+ jboolean bb;
+ jboolean cbb = 0;
+ int32 id = (int32)sdsid;
+
+ if (data == NULL) {
+ h4nullArgument(env, "SDreaddata: data is NULL");
+ } /* end if */
+ else if (start == NULL) {
+ h4nullArgument(env, "SDreaddata: start is NULL");
+ } /* end if */
+ else if (count == NULL) {
+ h4nullArgument(env, "SDreaddata: count is NULL");
+ } /* end if */
+ else {
+ d = (jfloat *)ENVPTR->GetPrimitiveArrayCritical(ENVPAR data, &bb);
+
+ strt = ENVPTR->GetIntArrayElements(ENVPAR start, &bb);
+ if (strt == NULL) {
+ h4JNIFatalError(env, "SDreaddata: start not pinned");
+ } /* end if */
+ else {
+ cnt = ENVPTR->GetIntArrayElements(ENVPAR count, &bb);
+ if (cnt == NULL) {
+ h4JNIFatalError(env, "SDreaddata: count not pinned");
+ } /* end if */
+ else {
+ if (stride == NULL) {
+ strd = NULL;
+ }
+ else {
+ strd = ENVPTR->GetIntArrayElements(ENVPAR stride, &bb);
+ }
+ rval = SDreaddata(id, strt, strd, cnt, d);
+
+ if (stride != NULL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR stride, strd, JNI_ABORT);
+ }
+ ENVPTR->ReleaseIntArrayElements(ENVPAR count, cnt, JNI_ABORT);
+
+ if (rval == FAIL) {
+ cbb = JNI_ABORT;
+ CALL_ERROR_CHECK();
+ }
+ } /* end else */
+ ENVPTR->ReleaseIntArrayElements(ENVPAR start, strt, JNI_ABORT);
+ }
+ ENVPTR->ReleasePrimitiveArrayCritical(ENVPAR data, d, cbb);
+ } /* end else */
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDreaddata_1double
+(JNIEnv *env, jclass clss, jlong sdsid, jintArray start, jintArray stride, jintArray count, jdoubleArray data)
+{
+ intn rval;
+ int32 *strt;
+ int32 *strd;
+ int32 *cnt;
+ jdouble *d;
+ jboolean bb;
+ jboolean cbb = 0;
+ int32 id = (int32)sdsid;
+
+ if (data == NULL) {
+ h4nullArgument(env, "SDreaddata: data is NULL");
+ } /* end if */
+ else if (start == NULL) {
+ h4nullArgument(env, "SDreaddata: start is NULL");
+ } /* end if */
+ else if (count == NULL) {
+ h4nullArgument(env, "SDreaddata: count is NULL");
+ } /* end if */
+ else {
+ d = (jdouble *)ENVPTR->GetPrimitiveArrayCritical(ENVPAR data, &bb);
+
+ strt = ENVPTR->GetIntArrayElements(ENVPAR start, &bb);
+ if (strt == NULL) {
+ h4JNIFatalError(env, "SDreaddata: start not pinned");
+ } /* end if */
+ else {
+ cnt = ENVPTR->GetIntArrayElements(ENVPAR count, &bb);
+ if (cnt == NULL) {
+ h4JNIFatalError(env, "SDreaddata: count not pinned");
+ } /* end if */
+ else {
+ if (stride == NULL) {
+ strd = NULL;
+ }
+ else {
+ strd = ENVPTR->GetIntArrayElements(ENVPAR stride, &bb);
+ }
+ rval = SDreaddata(id, strt, strd, cnt, d);
+
+ if (stride != NULL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR stride, strd, JNI_ABORT);
+ }
+ ENVPTR->ReleaseIntArrayElements(ENVPAR count, cnt, JNI_ABORT);
+
+ if (rval == FAIL) {
+ cbb = JNI_ABORT;
+ CALL_ERROR_CHECK();
+ }
+ } /* end else */
+ ENVPTR->ReleaseIntArrayElements(ENVPAR start, strt, JNI_ABORT);
+ }
+ ENVPTR->ReleasePrimitiveArrayCritical(ENVPAR data, d, cbb);
+ } /* end else */
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDendaccess
+(JNIEnv *env, jclass clss, jlong sdsid)
+{
+ intn rval;
+ int32 id = (int32)sdsid;
+
+ rval = SDendaccess(id);
+ if (rval < 0)
+ CALL_ERROR_CHECK();
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jlong JNICALL
+Java_hdf_hdflib_HDFLibrary_SDgetdimid
+(JNIEnv *env, jclass clss, jlong sdsid, jint index)
+{
+ int32 rval;
+ int32 id = (int32)sdsid;
+
+ rval = SDgetdimid(id, (intn)index);
+ if (rval < 0)
+ CALL_ERROR_CHECK();
+
+ return (jlong)rval;
+}
+
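+/*
+ * SDdiminfo wrapper: dimname[0] receives the dimension name and, per the
+ * SDdiminfo call below, argv[0] the dimension size, argv[1] the number type
+ * of the scale values, and argv[2] the number of attributes.
+ */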
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDdiminfo
+(JNIEnv *env, jclass clss, jlong dimid, jobjectArray dimname, jintArray argv)
+{
+ intn rval;
+ jclass jc;
+ jstring rstring;
+ jint *theArgs;
+ jboolean bb;
+ jobject o;
+ char str[256]; /* dimension name buffer; sized to match MAX_NC_NAME used elsewhere in this file */
+
+ if (dimname == NULL) {
+ h4nullArgument(env, "SDdiminfo: dimname is NULL");
+ } /* end if */
+ else if (argv == NULL) {
+ h4nullArgument(env, "SDdiminfo: argv is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR argv) < 3) {
+ h4badArgument(env, "SDdiminfo: argv input array < order 3");
+ } /* end else if */
+ else {
+ theArgs = ENVPTR->GetIntArrayElements(ENVPAR argv, &bb);
+ if (theArgs == NULL) {
+ h4JNIFatalError(env, "SDdiminfo: argv not pinned");
+ } /* end if */
+ else {
+ rval = SDdiminfo((int32)dimid, (char *)str, (int32 *)&(theArgs[0]),
+ (int32 *)&(theArgs[1]), (int32 *)&(theArgs[2]));
+
+ if (rval == FAIL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR argv, theArgs, JNI_ABORT);
+ CALL_ERROR_CHECK();
+ }
+ else {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR argv, theArgs, 0);
+ if (str != NULL) {
+ str[255] = '\0';
+ rstring = ENVPTR->NewStringUTF(ENVPAR str);
+ if (rstring != NULL) {
+ o = ENVPTR->GetObjectArrayElement(ENVPAR dimname, 0);
+ if (o == NULL) {
+ CALL_ERROR_CHECK();
+ }
+ else {
+ jc = ENVPTR->FindClass(ENVPAR "java/lang/String");
+ if (jc == NULL) {
+ CALL_ERROR_CHECK();
+ }
+ else {
+ bb = ENVPTR->IsInstanceOf(ENVPAR o, jc);
+ if (bb == JNI_FALSE) {
+ CALL_ERROR_CHECK();
+ }
+ else
+ ENVPTR->SetObjectArrayElement(ENVPAR dimname, 0, (jobject)rstring);
+ }
+ }
+ ENVPTR->DeleteLocalRef(ENVPAR o);
+ }
+ else
+ h4JNIFatalError(env, "SDdiminfo: can not create string");
+ }
+ }
+ } /* end else */
+ }
+ return JNI_TRUE;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_SDidtoref
+(JNIEnv *env, jclass clss, jlong sdsid)
+{
+ int32 rval;
+ int32 id = (int32)sdsid;
+
+ rval = SDidtoref(id);
+ if (rval < 0)
+ CALL_ERROR_CHECK();
+
+ return (jint)rval;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_SDreftoindex
+(JNIEnv *env, jclass clss, jlong sdid, jint ref)
+{
+ int32 rval;
+ int32 id = (int32)sdid;
+
+ rval = SDreftoindex(id, (int32)ref);
+ if (rval < 0)
+ CALL_ERROR_CHECK();
+
+ return (jint)rval;
+}
+
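+/*
+ * SDattrinfo wrapper: name[0] receives the attribute name and, per the
+ * SDattrinfo call below, argv[0] the number type and argv[1] the value count.
+ */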
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDattrinfo
+(JNIEnv *env, jclass clss, jlong sdsid, jint index, jobjectArray name, jintArray argv)
+{
+ intn rval;
+ jclass jc;
+ jstring rstring;
+ jint *theArgs;
+ jboolean bb;
+ jobject o;
+ char str[256]; /* attribute name buffer; sized to match MAX_NC_NAME used elsewhere in this file */
+ int32 id = (int32)sdsid;
+
+ if (name == NULL) {
+ h4nullArgument(env, "SDattrinfo: name is NULL");
+ } /* end if */
+ else if (argv == NULL) {
+ h4nullArgument(env, "SDattrinfo: argv is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR argv) < 2) {
+ h4badArgument(env, "SDattrinfo: argv input array < order 2");
+ } /* end else if */
+ else {
+ theArgs = ENVPTR->GetIntArrayElements(ENVPAR argv, &bb);
+ if (theArgs == NULL) {
+ h4JNIFatalError(env, "SDattrinfo: argv not pinned");
+ } /* end if */
+ else {
+ rval = SDattrinfo(id, (int32)index,
+ (char *)str, (int32 *)&(theArgs[0]), (int32 *)&(theArgs[1]));
+
+ if (rval == FAIL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR argv, theArgs, JNI_ABORT);
+ CALL_ERROR_CHECK();
+ }
+ else {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR argv, theArgs, 0);
+ if (str != NULL) {
+ str[255] = '\0';
+ rstring = ENVPTR->NewStringUTF(ENVPAR str);
+ o = ENVPTR->GetObjectArrayElement(ENVPAR name, 0);
+ if (o == NULL) {
+ CALL_ERROR_CHECK();
+ }
+ jc = ENVPTR->FindClass(ENVPAR "java/lang/String");
+ if (jc == NULL) {
+ CALL_ERROR_CHECK();
+ }
+ bb = ENVPTR->IsInstanceOf(ENVPAR o, jc);
+ if (bb == JNI_FALSE) {
+ CALL_ERROR_CHECK();
+ }
+ ENVPTR->SetObjectArrayElement(ENVPAR name, 0, (jobject)rstring);
+ ENVPTR->DeleteLocalRef(ENVPAR o);
+ }
+ }
+ } /* end else */
+ }
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDreadattr
+(JNIEnv *env, jclass clss, jlong sdsid, jint index, jbyteArray dat)
+{
+ intn rval;
+ jbyte *arr;
+ jboolean bb;
+ int32 id = (int32)sdsid;
+
+ if (dat == NULL) {
+ h4nullArgument(env, "SDreadattr: dat is NULL");
+ } /* end if */
+ else {
+ arr = ENVPTR->GetByteArrayElements(ENVPAR dat, &bb);
+ if (arr == NULL) {
+ h4JNIFatalError(env, "SDreadattr: dat not pinned");
+ } /* end if */
+ else {
+ rval = SDreadattr(id, (int32)index, (VOIDP)arr);
+ if (rval == FAIL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR dat, arr, JNI_ABORT);
+ CALL_ERROR_CHECK();
+ }
+ else {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR dat, arr, 0);
+ }
+ } /* end else */
+ } /* end else */
+ return JNI_TRUE;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_SDfindattr
+(JNIEnv *env, jclass clss, jlong sdsid, jstring name)
+{
+ int32 rval;
+ const char *str;
+ int32 id = (int32)sdsid;
+
+ PIN_JAVA_STRING(name, str);
+ if (str != NULL) {
+ rval = SDfindattr(id, str);
+
+ UNPIN_JAVA_STRING(name, str);
+ if (rval < 0)
+ CALL_ERROR_CHECK();
+ }
+
+ return (jint)rval;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDiscoordvar
+(JNIEnv *env, jclass clss, jlong sdsid)
+{
+ intn rval;
+ int32 id = (int32)sdsid;
+
+ rval = SDiscoordvar(id);
+ if (rval <= 0)
+ CALL_ERROR_CHECK();
+
+ return JNI_TRUE;
+}
+
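+/*
+ * SDgetcal wrapper: per the SDgetcal call below, argv[0..3] receive the
+ * calibration factor, calibration error, offset, and offset error, and
+ * nt[0] receives the number type of the uncalibrated data.
+ */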
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDgetcal
+(JNIEnv *env, jclass clss, jlong sdsid, jdoubleArray argv, jintArray nt)
+{
+ intn rval;
+ jdouble *theArgs;
+ jint *theNT;
+ jboolean bb;
+ jboolean cbb = 0;
+ int32 id = (int32)sdsid;
+
+ if (nt == NULL) {
+ h4nullArgument(env, "SDgetcal: nt is NULL");
+ } /* end if */
+ else if (argv == NULL) {
+ h4nullArgument(env, "SDgetcal: argv is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR argv) < 4) {
+ h4badArgument(env, "SDgetcal: argv input array < order 4");
+ } /* end else if */
+ else {
+ theNT = ENVPTR->GetIntArrayElements(ENVPAR nt, &bb);
+ if (theNT == NULL) {
+ h4JNIFatalError(env, "SDgetcal: nt not pinned");
+ } /* end if */
+ else {
+ theArgs = ENVPTR->GetDoubleArrayElements(ENVPAR argv, &bb);
+ if (theArgs == NULL) {
+ h4JNIFatalError(env, "SDgetcal: argv not pinned");
+ } /* end if */
+ else {
+ rval = SDgetcal(id, (float64 *)&(theArgs[0]),
+ (float64 *)&(theArgs[1]), (float64 *)&(theArgs[2]),
+ (float64 *)&(theArgs[3]), (int32 *)&(theNT[0]));
+
+ if (rval == FAIL) {
+ cbb = JNI_ABORT;
+ CALL_ERROR_CHECK();
+ }
+ ENVPTR->ReleaseDoubleArrayElements(ENVPAR argv, theArgs, cbb);
+ } /* end else */
+ ENVPTR->ReleaseIntArrayElements(ENVPAR nt, theNT, cbb);
+ } /* end else */
+ } /* end else */
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDgetdatastrs
+(JNIEnv *env, jclass clss, jlong sdsid, jobjectArray strings, jint len)
+{
+ intn rval;
+ jstring str;
+ jstring label, unit, format, coordsys;
+ char *labVal;
+ char *unitVal;
+ char *fmtVal;
+ char *coordsysVal;
+ int32 id = (int32)sdsid;
+
+ if (strings == NULL) {
+ h4nullArgument(env, "SDgetdatastrs: strings is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR strings) < 4) {
+ h4badArgument(env, "SDgetdatastrs: strings input array < order 4");
+ } /* end else if */
+ else {
+ label = (jstring)ENVPTR->GetObjectArrayElement(ENVPAR strings, 0);
+ /* allocate space */
+ if (label == NULL) {
+ labVal = NULL; /* don't read label */
+ }
+ else {
+ labVal = (char *)HDmalloc(len+1);
+ if (labVal == NULL) {
+ h4outOfMemory(env, "SDgetdatastrs");
+ }
+ }
+ ENVPTR->DeleteLocalRef(ENVPAR label);
+
+ unit = (jstring)ENVPTR->GetObjectArrayElement(ENVPAR strings, 1);
+ if (unit == NULL) {
+ unitVal = NULL;
+ }
+ else {
+ unitVal = (char *)HDmalloc(len+1);
+ if (unitVal == NULL) {
+ h4outOfMemory(env, "SDgetdatastrs");
+ }
+ }
+ ENVPTR->DeleteLocalRef(ENVPAR unit);
+
+ format = (jstring)ENVPTR->GetObjectArrayElement(ENVPAR strings, 2);
+ if (format == NULL) {
+ fmtVal = NULL;
+ }
+ else {
+ fmtVal = (char *)HDmalloc(len+1);
+ if (fmtVal == NULL) {
+ h4outOfMemory(env, "SDgetdatastrs");
+ }
+ }
+ ENVPTR->DeleteLocalRef(ENVPAR format);
+
+ coordsys = (jstring)ENVPTR->GetObjectArrayElement(ENVPAR strings, 3);
+ if (coordsys == NULL) {
+ coordsysVal = NULL;
+ }
+ else {
+ coordsysVal = (char *)HDmalloc(len+1);
+ if (coordsysVal == NULL) {
+ h4outOfMemory(env, "SDgetdatastrs");
+ }
+ }
+ ENVPTR->DeleteLocalRef(ENVPAR coordsys);
+
+ rval = SDgetdatastrs(id, labVal, unitVal, fmtVal, coordsysVal, (int32)len);
+
+ if (rval == FAIL) {
+ CALL_ERROR_CHECK();
+ }
+ else {
+ if (labVal != NULL) {
+ labVal[len] = '\0';
+ str = ENVPTR->NewStringUTF(ENVPAR labVal);
+ if (str != NULL) {
+ ENVPTR->SetObjectArrayElement(ENVPAR strings, 0, (jobject)str);
+ }
+ }
+ if (unitVal != NULL) {
+ unitVal[len] = '\0';
+ str = ENVPTR->NewStringUTF(ENVPAR unitVal);
+ if (str != NULL) {
+ ENVPTR->SetObjectArrayElement(ENVPAR strings, 1, (jobject)str);
+ }
+ }
+ if (fmtVal != NULL) {
+ fmtVal[len] = '\0';
+ str = ENVPTR->NewStringUTF(ENVPAR fmtVal);
+ if (str != NULL) {
+ ENVPTR->SetObjectArrayElement(ENVPAR strings, 2, (jobject)str);
+ }
+ }
+ if (coordsysVal != NULL) {
+ coordsysVal[len] = '\0';
+ str = ENVPTR->NewStringUTF(ENVPAR coordsysVal);
+ if (str != NULL) {
+ ENVPTR->SetObjectArrayElement(ENVPAR strings, 3, (jobject)str);
+ }
+ }
+ }
+
+ if (labVal != NULL) HDfree(labVal);
+ if (unitVal != NULL) HDfree(unitVal);
+ if (fmtVal != NULL) HDfree(fmtVal);
+ if (coordsysVal != NULL) HDfree(coordsysVal);
+ } /* end else */
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDgetdimstrs
+(JNIEnv *env, jclass clss, jlong dimid, jobjectArray strings, jint len)
+{
+ intn rval;
+ jstring str;
+ jstring label, unit, format;
+ char *labVal;
+ char *unitVal;
+ char *fmtVal;
+
+ if (strings == NULL) {
+ h4nullArgument(env, "SDgetdimstrs: strings is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR strings) < 3) {
+ h4badArgument(env, "SDgetdimstrs: strings input array < order 3");
+ } /* end else if */
+ else {
+ label = (jstring)ENVPTR->GetObjectArrayElement(ENVPAR strings, 0);
+ /* allocate space */
+ if (label == NULL) {
+ labVal = NULL; /* don't read label */
+ }
+ else {
+ labVal = (char *)HDmalloc(len+1);
+ if (labVal == NULL) {
+ h4outOfMemory(env, "SDgetdimstrs");
+ }
+ }
+ ENVPTR->DeleteLocalRef(ENVPAR label);
+
+ unit = (jstring)ENVPTR->GetObjectArrayElement(ENVPAR strings, 1);
+ if (unit == NULL) {
+ unitVal = NULL;
+ }
+ else {
+ unitVal = (char *)HDmalloc(len+1);
+ if (unitVal == NULL) {
+ h4outOfMemory(env, "SDgetdimstrs");
+ }
+ }
+ ENVPTR->DeleteLocalRef(ENVPAR unit);
+
+ format = (jstring)ENVPTR->GetObjectArrayElement(ENVPAR strings, 2);
+ if (format == NULL) {
+ fmtVal = NULL;
+ }
+ else {
+ fmtVal = (char *)HDmalloc(len+1);
+ if (fmtVal == NULL) {
+ h4outOfMemory(env, "SDgetdimstrs");
+ }
+ }
+ ENVPTR->DeleteLocalRef(ENVPAR format);
+
+ rval = SDgetdimstrs((int32)dimid, labVal, unitVal, fmtVal, (int32)len);
+
+ if (rval == FAIL) {
+ CALL_ERROR_CHECK();
+ }
+ else {
+ if (labVal != NULL) {
+ labVal[len] = '\0';
+ str = ENVPTR->NewStringUTF(ENVPAR labVal);
+ if (str != NULL) {
+ ENVPTR->SetObjectArrayElement(ENVPAR strings, 0, (jobject)str);
+ }
+ }
+ if (unitVal != NULL) {
+ unitVal[len] = '\0';
+ str = ENVPTR->NewStringUTF(ENVPAR unitVal);
+ if (str != NULL) {
+ ENVPTR->SetObjectArrayElement(ENVPAR strings, 1, (jobject)str);
+ }
+ }
+ if (fmtVal != NULL) {
+ fmtVal[len] = '\0';
+ str = ENVPTR->NewStringUTF(ENVPAR fmtVal);
+ if (str != NULL) {
+ ENVPTR->SetObjectArrayElement(ENVPAR strings, 2, (jobject)str);
+ }
+ }
+ }
+
+ if (labVal != NULL) HDfree(labVal);
+ if (unitVal != NULL) HDfree(unitVal);
+ if (fmtVal != NULL) HDfree(fmtVal);
+ } /* end else */
+ return JNI_TRUE;
+}
+
+/* Note: SDgetdimscale returns the scale values as raw bytes regardless of the
+ * dimension's number type; the Java caller must reinterpret them accordingly. */
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDgetdimscale
+(JNIEnv *env, jclass clss, jlong dimid, jbyteArray data)
+{
+ intn rval;
+ jbyte *datVal;
+ jboolean bb;
+ jboolean cbb = 0;
+
+ if (data == NULL) {
+ h4nullArgument(env, "SDgetdimscale: data is NULL");
+ } /* end if */
+ else {
+ datVal = ENVPTR->GetByteArrayElements(ENVPAR data, &bb);
+ if (datVal == NULL) {
+ h4JNIFatalError(env, "SDgetdimscale: data not pinned");
+ } /* end if */
+ else {
+ rval = SDgetdimscale((int32)dimid, (char *)datVal);
+ if (rval == FAIL) {
+ cbb = JNI_ABORT;
+ CALL_ERROR_CHECK();
+ }
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR data, datVal, cbb);
+ } /* end else */
+ } /* end else */
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDgetfillvalue
+(JNIEnv *env, jclass clss, jlong sdsid, jbyteArray data)
+{
+ intn rval;
+ jbyte *datVal;
+ jboolean bb;
+ jboolean cbb = 0;
+ int32 id = (int32)sdsid;
+
+ if (data == NULL) {
+ h4nullArgument(env, "SDgetfillvalue: data is NULL");
+ } /* end if */
+ else {
+ datVal = ENVPTR->GetByteArrayElements(ENVPAR data, &bb);
+ if (datVal == NULL) {
+ h4JNIFatalError(env, "SDgetfillvalue: data not pinned");
+ } /* end if */
+ else {
+ rval = SDgetfillvalue(id, (char *)datVal);
+ if (rval == FAIL) {
+ cbb = JNI_ABORT;
+ CALL_ERROR_CHECK();
+ }
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR data, datVal, cbb);
+ } /* end else */
+ } /* end else */
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDgetrange
+(JNIEnv *env, jclass clss, jlong sdsid, jbyteArray max, jbyteArray min)
+{
+ intn rval;
+ jbyte *minp, *maxp;
+ jboolean bb;
+ jboolean cbb = 0;
+ int32 id = (int32)sdsid;
+
+ if (max == NULL) {
+ h4nullArgument(env, "SDgetrange: max is NULL");
+ } /* end if */
+ else if (min == NULL) {
+ h4nullArgument(env, "SDgetrange: min is NULL");
+ } /* end if */
+ else {
+ maxp = ENVPTR->GetByteArrayElements(ENVPAR max, &bb);
+ if (maxp == NULL) {
+ h4JNIFatalError(env, "SDgetrange: max not pinned");
+ } /* end if */
+ else {
+ minp = ENVPTR->GetByteArrayElements(ENVPAR min, &bb);
+ if (minp == NULL) {
+ h4JNIFatalError(env, "SDgetrange: min not pinned");
+ } /* end if */
+ else {
+ rval = SDgetrange(id, (void *)maxp, (void *)minp);
+ if (rval == FAIL) {
+ cbb = JNI_ABORT;
+ CALL_ERROR_CHECK();
+ }
+ ENVPTR->ReleaseByteArrayElements(ENVPAR min, minp, cbb);
+ } /* end else */
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR max, maxp, cbb);
+ } /* end else */
+ } /* end else */
+ return JNI_TRUE;
+}
+
+JNIEXPORT jlong JNICALL
+Java_hdf_hdflib_HDFLibrary_SDcreate
+(JNIEnv *env, jclass clss, jlong sd_id, jstring name, jlong number_type, jint rank, jintArray dimsizes)
+{
+ int32 rval;
+ const char *str;
+ jint *dims;
+ jboolean isCopy;
+ int32 id = (int32)sd_id;
+
+ if (dimsizes == NULL) {
+ h4nullArgument(env, "SDcreate: dimsizes is NULL");
+ } /* end if */
+ else {
+ PIN_JAVA_STRING(name, str);
+ if (str != NULL) {
+ dims = ENVPTR->GetIntArrayElements(ENVPAR dimsizes, &isCopy);
+ if (dims == NULL) {
+ h4JNIFatalError(env, "SDcreate: dimsizes not pinned");
+ } /* end if */
+ else {
+ rval = SDcreate(id, str, (int32)number_type, (int32)rank, (int32 *)dims);
+
+ ENVPTR->ReleaseIntArrayElements(ENVPAR dimsizes, dims, JNI_ABORT);
+
+ if (rval < 0)
+ CALL_ERROR_CHECK();
+ } /* end else */
+ UNPIN_JAVA_STRING(name, str);
+ }
+ } /* end else */
+
+ return (jlong)rval;
+}
+
+/* Note: this wrapper returns JNI_FALSE both when the dataset is not a record
+ * variable and when SDisrecord fails; the two cases are not distinguished. */
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDisrecord
+(JNIEnv *env, jclass clss, jlong sdsid)
+{
+ int32 rval;
+ int32 id = (int32)sdsid;
+
+ rval = SDisrecord(id);
+
+ if (rval == TRUE) {
+ return JNI_TRUE;
+ }
+ else {
+ return JNI_FALSE;
+ }
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDsetattr
+(JNIEnv *env, jclass clss, jlong s_id, jstring attr_name, jlong num_type, jint count, jbyteArray values)
+{
+ intn rval;
+ jbyte *arr;
+ const char *str;
+ jboolean bb;
+ int32 id = (int32)s_id;
+
+ if (values == NULL) {
+ h4nullArgument(env, "SDsetattr: values is NULL");
+ } /* end if */
+ else {
+ PIN_JAVA_STRING(attr_name, str);
+ if (str != NULL) {
+ arr = ENVPTR->GetByteArrayElements(ENVPAR values, &bb);
+ if (arr == NULL) {
+ h4JNIFatalError(env, "SDsetattr: values not pinned");
+ } /* end if */
+ else {
+ rval = SDsetattr(id, str, (int32)num_type, (int32)count, (VOIDP)arr);
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR values, arr, JNI_ABORT);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+ } /* end else */
+ UNPIN_JAVA_STRING(attr_name, str);
+ }
+ } /* end else */
+ return JNI_TRUE;
+}
+
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDsetcal
+(JNIEnv *env, jclass clss, jlong sds_id, jdouble cal, jdouble cal_err, jdouble offset, jdouble offset_err, jint number_type)
+{
+ intn rval;
+ int32 id = (int32)sds_id;
+
+ rval = SDsetcal(id, (float64)cal, (float64)cal_err,
+ (float64)offset, (float64)offset_err, (int32)number_type);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDsetdatastrs
+(JNIEnv *env, jclass clss, jlong sds_id, jstring label, jstring unit, jstring format, jstring coordsys)
+{
+ intn rval;
+ const char *labstr;
+ const char *unstr;
+ const char *formstr;
+ const char *csstr;
+
+ if (label == NULL) {
+ labstr = NULL;
+ }
+ else {
+ labstr = ENVPTR->GetStringUTFChars(ENVPAR label, 0);
+ }
+ if (unit == NULL) {
+ unstr = NULL;
+ }
+ else {
+ unstr = ENVPTR->GetStringUTFChars(ENVPAR unit, 0);
+ }
+ if (format == NULL) {
+ formstr = NULL;
+ }
+ else {
+ formstr = ENVPTR->GetStringUTFChars(ENVPAR format, 0);
+ }
+ if (coordsys == NULL) {
+ csstr = NULL;
+ }
+ else {
+ csstr = ENVPTR->GetStringUTFChars(ENVPAR coordsys, 0);
+ }
+
+ rval = SDsetdatastrs((int32)sds_id, labstr, unstr, formstr, csstr);
+
+ if (labstr != NULL) ENVPTR->ReleaseStringUTFChars(ENVPAR label, labstr);
+ if (unstr != NULL) ENVPTR->ReleaseStringUTFChars(ENVPAR unit, unstr);
+ if (formstr != NULL) ENVPTR->ReleaseStringUTFChars(ENVPAR format, formstr);
+ if (csstr != NULL) ENVPTR->ReleaseStringUTFChars(ENVPAR coordsys, csstr);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDsetdimname
+(JNIEnv *env, jclass clss, jlong dim_id, jstring dim_name)
+{
+ intn rval;
+ const char *str;
+
+ PIN_JAVA_STRING(dim_name, str);
+ if (str != NULL) {
+ rval = SDsetdimname((int32)dim_id, str);
+
+ UNPIN_JAVA_STRING(dim_name, str);
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+ }
+
+ return JNI_TRUE;
+}
+
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDsetdimscale
+(JNIEnv *env, jclass clss, jlong dim_id, jint count, jint number_type, jbyteArray data)
+{
+ intn rval;
+ jbyte *d;
+ jboolean bb;
+
+ if (data == NULL) {
+ h4nullArgument(env, "SDsetdimscale: data is NULL");
+ } /* end if */
+ else {
+ d = ENVPTR->GetByteArrayElements(ENVPAR data, &bb);
+ if (d == NULL) {
+ h4JNIFatalError(env, "SDsetdimscale: data not pinned");
+ } /* end if */
+ else {
+ rval = SDsetdimscale((int32)dim_id, (int32)count, (int32)number_type, (VOIDP)d);
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR data, d, JNI_ABORT);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+ } /* end else */
+ } /* end else */
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDsetdimstrs
+(JNIEnv *env, jclass clss, jlong dim_id, jstring label, jstring unit, jstring format)
+{
+ intn rval;
+ const char *labstr;
+ const char *unstr;
+ const char *formstr;
+
+ if (label == NULL) {
+ labstr = NULL;
+ }
+ else {
+ labstr = ENVPTR->GetStringUTFChars(ENVPAR label, 0);
+ }
+ if (unit == NULL) {
+ unstr = NULL;
+ }
+ else {
+ unstr = ENVPTR->GetStringUTFChars(ENVPAR unit, 0);
+ }
+ if (format == NULL) {
+ formstr = NULL;
+ }
+ else {
+ formstr = ENVPTR->GetStringUTFChars(ENVPAR format, 0);
+ }
+
+ rval = SDsetdimstrs((int32) dim_id, labstr, unstr, formstr);
+
+ if (labstr != NULL) ENVPTR->ReleaseStringUTFChars(ENVPAR label, labstr);
+ if (unstr != NULL) ENVPTR->ReleaseStringUTFChars(ENVPAR unit, unstr);
+ if (formstr != NULL) ENVPTR->ReleaseStringUTFChars(ENVPAR format, formstr);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ return JNI_TRUE;
+}
+
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDsetexternalfile
+(JNIEnv *env, jclass clss, jlong sds_id, jstring filename, jint offset)
+{
+ intn rval;
+ const char *f;
+ int32 id = (int32)sds_id;
+
+ PIN_JAVA_STRING(filename, f);
+ if (f != NULL) {
+ rval = SDsetexternalfile(id, f, (int32)offset);
+
+ UNPIN_JAVA_STRING(filename, f);
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+ }
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDsetfillvalue
+(JNIEnv *env, jclass clss, jlong sds_id, jbyteArray fill_val)
+{
+ intn rval;
+ jboolean bb;
+ jbyte *d;
+ int32 id = (int32)sds_id;
+
+ if (fill_val == NULL) {
+ h4nullArgument(env, "SDsetfillvalue: fill_val is NULL");
+ } /* end if */
+ else {
+ d = ENVPTR->GetByteArrayElements(ENVPAR fill_val, &bb);
+ if (d == NULL) {
+ h4JNIFatalError(env, "SDsetfillvalue: fill_val not pinned");
+ } /* end if */
+ else {
+ rval = SDsetfillvalue(id, (VOIDP)d);
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR fill_val, d, JNI_ABORT);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+ } /* end else */
+ } /* end else */
+ return JNI_TRUE;
+}
+
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDsetrange
+(JNIEnv *env, jclass clss, jlong sdsid, jbyteArray max, jbyteArray min)
+{
+ int32 rval;
+ jboolean bb;
+ jbyte *minp, *maxp;
+ int32 id = (int32)sdsid;
+
+ if (max == NULL) {
+ h4nullArgument(env, "SDsetrange: max is NULL");
+ } /* end if */
+ else if (min == NULL) {
+ h4nullArgument(env, "SDsetrange: min is NULL");
+ } /* end if */
+ else {
+ maxp = ENVPTR->GetByteArrayElements(ENVPAR max, &bb);
+ if (maxp == NULL) {
+ h4JNIFatalError(env, "SDgetrange: max not pinned");
+ } /* end if */
+ else {
+ minp = ENVPTR->GetByteArrayElements(ENVPAR min, &bb);
+ if (minp == NULL) {
+ h4JNIFatalError(env, "SDgetrange: min not pinned");
+ } /* end if */
+ else {
+ rval = SDsetrange(id, maxp, minp);
+ if (rval == FAIL) {
+ CALL_ERROR_CHECK();
+ }
+ ENVPTR->ReleaseByteArrayElements(ENVPAR min, minp, JNI_ABORT);
+ } /* end else */
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR max, maxp, JNI_ABORT);
+ } /* end else */
+ } /* end else */
+ return JNI_TRUE;
+}
+
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDwritedata
+(JNIEnv *env, jclass clss, jlong sdsid, jintArray start, jintArray stride, jintArray edge, jbyteArray data)
+{
+ int32 rval;
+ int32 *strt;
+ int32 *strd;
+ int32 *e;
+ jbyte *d;
+ jboolean bb;
+ jboolean cbb = 0;
+ int32 id = (int32)sdsid;
+
+ if (data == NULL) {
+ h4nullArgument(env, "SDwritedata: data is NULL");
+ } /* end if */
+ else if (start == NULL) {
+ h4nullArgument(env, "SDwritedata: start is NULL");
+ } /* end if */
+ else if (edge == NULL) {
+ h4nullArgument(env, "SDwritedata: count is NULL");
+ } /* end if */
+ else {
+ d = (jbyte *)ENVPTR->GetPrimitiveArrayCritical(ENVPAR data, &bb);
+
+ strt = ENVPTR->GetIntArrayElements(ENVPAR start, &bb);
+ if (strt == NULL) {
+ h4JNIFatalError(env, "SDwritedata: start not pinned");
+ } /* end if */
+ else {
+ e = ENVPTR->GetIntArrayElements(ENVPAR edge, &bb);
+ if (e == NULL) {
+ h4JNIFatalError(env, "SDwritedata: count not pinned");
+ } /* end if */
+ else {
+ if (stride == NULL) {
+ strd = NULL;
+ }
+ else {
+ strd = ENVPTR->GetIntArrayElements(ENVPAR stride, &bb);
+ }
+ rval = SDwritedata(id, strt, strd, e, d);
+
+ if (stride != NULL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR stride, strd, JNI_ABORT);
+ }
+ ENVPTR->ReleaseIntArrayElements(ENVPAR edge, e, JNI_ABORT);
+
+ if (rval == FAIL) {
+ cbb = JNI_ABORT;
+ CALL_ERROR_CHECK();
+ }
+ } /* end else */
+ ENVPTR->ReleaseIntArrayElements(ENVPAR start, strt, JNI_ABORT);
+ }
+ ENVPTR->ReleasePrimitiveArrayCritical(ENVPAR data, d, cbb);
+ } /* end else */
+ return JNI_TRUE;
+}
+
+/* wrappers for n-bit, compression, chunking, and related dataset options */
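+/*
+ * These wrappers convert between the Java-side info objects (HDFCompInfo,
+ * HDFChunkInfo and their subclasses) and the C comp_info / HDF_CHUNK_DEF
+ * structures via the helpers declared at the top of this file
+ * (getNewCompInfo, setNewCompInfo, getChunkInfo, makeChunkInfo), which are
+ * implemented in hdfstructsutil.c.
+ */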
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDsetnbitdataset
+(JNIEnv *env, jclass clss, jlong sdsid, jint start_bit, jint bit_len, jint sign_ext, jint fill_one)
+{
+ intn rval;
+ int32 id = (int32)sdsid;
+
+ rval = SDsetnbitdataset(id, (intn) start_bit, (intn) bit_len,
+ (intn) sign_ext, (intn) fill_one);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDsetcompress
+(JNIEnv *env, jclass clss, jlong sdsid, jint type, jobject cinfo)
+{
+ intn rval;
+ comp_info cinf;
+ int32 id = (int32)sdsid;
+
+ if (cinfo == NULL) {
+ h4nullArgument(env, "SDsetcompress: cinfo is NULL");
+ } /* end if */
+ else if (getNewCompInfo(env, cinfo, &cinf) == JNI_FALSE) {
+ h4raiseException(env, "SDsetcompress: error creating comp_info struct");
+ }
+ else {
+ rval = SDsetcompress(id, (comp_coder_t)type, (comp_info *)&cinf);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+ }
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDgetcompinfo
+(JNIEnv *env, jclass clss, jlong sdsid, jobject cinfo)
+{
+ intn rval;
+ comp_coder_t coder;
+ comp_info cinf;
+ int32 id = (int32)sdsid;
+
+ if (cinfo == NULL) {
+ h4nullArgument(env, "SDgetcompinfo: cinfo is NULL");
+ } /* end if */
+ else {
+ rval = SDgetcompinfo(id, (comp_coder_t *)&coder, (comp_info *)&cinf);
+
+ if (rval == FAIL) {
+ CALL_ERROR_CHECK();
+ }
+ else {
+ if (setNewCompInfo(env, cinfo, coder, &cinf) == JNI_FALSE)
+ h4raiseException(env, "SDgetcompinfo: error creating comp_info struct");
+ }
+ }
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDgetcompress
+(JNIEnv *env, jclass clss, jlong sdsid, jobject cinfo)
+{
+ intn rval;
+ comp_coder_t coder;
+ comp_info cinf;
+ int32 id = (int32)sdsid;
+
+ if (cinfo == NULL) {
+ h4nullArgument(env, "SDgetcompress: cinfo is NULL");
+ } /* end if */
+ else {
+ rval = SDgetcompress(id, (comp_coder_t *)&coder, (comp_info *)&cinf);
+
+ if (rval == FAIL) {
+ CALL_ERROR_CHECK();
+ }
+ else {
+ if (setNewCompInfo(env, cinfo, coder, &cinf) == JNI_FALSE)
+ h4raiseException(env, "SDgetcompress: error creating comp_info struct");
+ }
+ }
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDsetaccesstype
+(JNIEnv *env, jclass clss, jlong sdsid, jint accesstype)
+{
+ intn rval;
+ int32 id = (int32)sdsid;
+
+ rval = SDsetaccesstype(id, (uintn)accesstype);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDsetblocksize
+(JNIEnv *env, jclass clss, jlong sdsid, jint block_size)
+{
+ intn rval;
+ int32 id = (int32)sdsid;
+
+ rval = SDsetblocksize(id, (int32)block_size);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDsetfillmode
+(JNIEnv *env, jclass clss, jlong sdsid, jint fillmode)
+{
+ intn rval;
+ int32 id = (int32)sdsid;
+
+ rval = SDsetfillmode(id, (intn)fillmode);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDsetdimval_1comp
+(JNIEnv *env, jclass clss, jlong sdsid, jint comp_mode)
+{
+ intn rval;
+ int32 id = (int32)sdsid;
+
+ rval = SDsetdimval_comp(id, (intn)comp_mode);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDisdimval_1bwcomp
+(JNIEnv *env, jclass clss, jlong dimid)
+{
+ intn rval;
+
+ rval = SDisdimval_bwcomp((int32)dimid);
+ if (rval == SD_DIMVAL_BW_COMP) {
+ return JNI_TRUE;
+ }
+ else if (rval == SD_DIMVAL_BW_INCOMP) {
+ return JNI_FALSE;
+ }
+ else
+ CALL_ERROR_CHECK();
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDsetchunk
+(JNIEnv *env, jclass clss, jlong sdsid, jobject chunk_def, jint flags)
+{
+ intn rval;
+ HDF_CHUNK_DEF c_def;
+ int32 id = (int32)sdsid;
+
+ if (chunk_def == NULL) {
+ h4nullArgument(env, "SDsetchunk: chunk_def is NULL");
+ } /* end if */
+ else if (getChunkInfo(env, chunk_def, &c_def) == JNI_FALSE) {
+ h4raiseException(env, "SDsetchunk: error creating chunk_def struct");
+ }
+ else {
+ rval = SDsetchunk(id, c_def, (int32)flags);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+ }
+ return JNI_TRUE;
+}
+
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDgetchunkinfo
+(JNIEnv *env, jclass clss, jlong sdsid, jobject chunk_def, jintArray cflags)
+{
+ int32 rval;
+ HDF_CHUNK_DEF cdef;
+ jint *flgs;
+ jboolean bb;
+ jboolean cbb = 0;
+ int32 id = (int32)sdsid;
+
+ if (chunk_def == NULL) {
+ h4nullArgument(env, "SDgetchunkinfo: chunk_def is NULL");
+ } /* end if */
+ else if (cflags == NULL) {
+ h4nullArgument(env, "SDgetchunkinfo: cflags is NULL");
+ } /* end else if */
+ else {
+ flgs = ENVPTR->GetIntArrayElements(ENVPAR cflags, &bb);
+ if (flgs == NULL) {
+ h4JNIFatalError(env, "SDgetchunkinfo: cflags not pinned");
+ } /* end if */
+ else {
+ rval = SDgetchunkinfo(id, &cdef, (int32 *)&(flgs[0]));
+
+ if (rval == FAIL) {
+ cbb = JNI_ABORT;
+ CALL_ERROR_CHECK();
+ }
+ else {
+ /* convert cdef to HDFchunkinfo */
+ if (makeChunkInfo(env, chunk_def, (int32)*flgs, &cdef) == JNI_FALSE)
+ h4raiseException(env, "SDgetchunkinfo: error creating chunk_def struct");
+ }
+ ENVPTR->ReleaseIntArrayElements(ENVPAR cflags, (jint *)flgs, cbb);
+ }
+ }
+
+ return JNI_TRUE;
+}
+
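+/*
+ * SDreadchunk/SDwritechunk wrappers: origin is passed straight through as
+ * the chunk origin and dat as the raw chunk buffer, so the Java caller is
+ * responsible for sizing dat to hold one complete chunk.
+ */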
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDreadchunk
+(JNIEnv *env, jclass clss, jlong sdid, jintArray origin, jbyteArray dat)
+{
+ int32 rval;
+ jbyte * s;
+ jint *arr;
+ jboolean bb;
+ jboolean cbb = 0;
+ int32 id = (int32)sdid;
+
+ if (dat == NULL) {
+ h4nullArgument(env, "SDreadchunk: data is NULL");
+ } /* end if */
+ else if (origin == NULL) {
+ h4nullArgument(env, "SDreadchunk: origin is NULL");
+ } /* end else if */
+ else {
+ arr = ENVPTR->GetIntArrayElements(ENVPAR origin, &bb);
+ if (arr == NULL) {
+ h4JNIFatalError(env, "SDreadchunk: origin not pinned");
+ } /* end if */
+ else {
+ s = ENVPTR->GetByteArrayElements(ENVPAR dat, &bb);
+ if (s == NULL) {
+ h4JNIFatalError(env, "SDreadchunk: data not pinned");
+ } /* end if */
+ else {
+ rval = SDreadchunk(id, (int32 *)arr, s);
+
+ if (rval == FAIL) {
+ cbb = JNI_ABORT;
+ CALL_ERROR_CHECK();
+ }
+ ENVPTR->ReleaseByteArrayElements(ENVPAR dat, s, cbb);
+ }
+ ENVPTR->ReleaseIntArrayElements(ENVPAR origin, arr, JNI_ABORT);
+ }
+ }
+ return JNI_TRUE;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_SDsetchunkcache
+(JNIEnv *env, jclass clss, jlong sdsid, jint maxcache, jint flags)
+{
+ intn rval;
+ int32 id = (int32)sdsid;
+
+ rval = SDsetchunkcache(id, (int32)maxcache, (int32)flags);
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ return (jint)rval;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDwritechunk
+(JNIEnv *env, jclass clss, jlong sdid, jintArray origin, jbyteArray dat)
+{
+ int32 rval;
+ jbyte *s;
+ jint *arr;
+ jboolean bb;
+ int32 id = (int32)sdid;
+
+ if (dat == NULL) {
+ h4nullArgument(env, "SDwritechunk: data is NULL");
+ } /* end if */
+ else if (origin == NULL) {
+ h4nullArgument(env, "SDwritechunk: origin is NULL");
+ } /* end else if */
+ else {
+ arr = ENVPTR->GetIntArrayElements(ENVPAR origin, &bb);
+ if (arr == NULL) {
+ h4JNIFatalError(env, "SDwritechunk: origin not pinned");
+ } /* end if */
+ else {
+ s = ENVPTR->GetByteArrayElements(ENVPAR dat, &bb);
+ if (s == NULL) {
+ h4JNIFatalError(env, "SDwritechunk: data not pinned");
+ } /* end if */
+ else {
+ rval = SDwritechunk(id, (int32 *)arr, s);
+
+ if (rval == FAIL) {
+ CALL_ERROR_CHECK();
+ }
+ ENVPTR->ReleaseByteArrayElements(ENVPAR dat, s, JNI_ABORT);
+ }
+ ENVPTR->ReleaseIntArrayElements(ENVPAR origin, arr, JNI_ABORT);
+ }
+ }
+ return JNI_TRUE;
+}
+
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_SDcheckempty
+(JNIEnv *env, jclass clss, jlong sdsid, jintArray emptySDS)
+{
+ int32 rval;
+ jboolean isCopy;
+ intn *isempty;
+ int32 id = (int32)sdsid;
+
+ if (emptySDS == NULL) {
+ h4nullArgument(env, "SDcheckempty: emptySDS is NULL");
+ } /* end if */
+ else {
+ isempty = (intn *)ENVPTR->GetIntArrayElements(ENVPAR emptySDS, &isCopy);
+ if (isempty == NULL) {
+ h4JNIFatalError(env, "SDcheckempty: emptySDS not pinned");
+ } /* end if */
+ else {
+ rval = SDcheckempty(id, isempty);
+
+ if (rval == FAIL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR emptySDS, isempty, JNI_ABORT);
+ CALL_ERROR_CHECK();
+ }
+ else {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR emptySDS, isempty, 0);
+ }
+ } /* end else */
+ } /* end else */
+
+ return JNI_TRUE;
+}
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/java/src/jni/hdfstructsutil.c b/java/src/jni/hdfstructsutil.c
new file mode 100644
index 0000000..ad44d74
--- /dev/null
+++ b/java/src/jni/hdfstructsutil.c
@@ -0,0 +1,639 @@
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+/*
+ * This module contains code to translate between the HDF C union 'comp_info'
+ * and a subclass of the Java class CompInfo.
+ *
+ * This is nasty and ugly and probably buggy.
+ *
+ */
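+/*
+ * The helpers below (getNewCompInfo, setNewCompInfo, getOldCompInfo,
+ * getChunkInfo, makeChunkInfo) copy field values between the Java info
+ * objects and the corresponding C structures via JNI field access; the SD
+ * wrappers declare them extern and call them for compression and chunking
+ * setup.
+ */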
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "hdf.h"
+#include "jni.h"
+#include "h4jni.h"
+
+/*
+ * Get information from a Java HDFNewCompInfo object in to a C comp_info
+ * struct.
+ *
+ * Extract information for the different types of compression.
+ */
+
+jboolean
+getNewCompInfo
+(JNIEnv *env, jobject ciobj, comp_info *cinf)
+{
+ jfieldID jf;
+ jclass jc;
+ jint ctype;
+
+ jc = ENVPTR->FindClass(ENVPAR "hdf/hdflib/HDFNewCompInfo");
+ if (jc == NULL) {
+ return JNI_FALSE;
+ }
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "ctype", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ ctype = ENVPTR->GetIntField(ENVPAR ciobj, jf);
+
+ switch(ctype) {
+ case COMP_CODE_NONE:
+ case COMP_CODE_RLE:
+ default:
+ break;
+
+ case COMP_CODE_SKPHUFF:
+ jc = ENVPTR->FindClass(ENVPAR "hdf/hdflib/HDFSKPHUFFCompInfo");
+ if (jc == NULL) {
+ return JNI_FALSE;
+ }
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "skp_size", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ cinf->skphuff.skp_size = ENVPTR->GetIntField(ENVPAR ciobj, jf);
+ break;
+
+ case COMP_CODE_DEFLATE:
+ jc = ENVPTR->FindClass(ENVPAR "hdf/hdflib/HDFDeflateCompInfo");
+ if (jc == NULL) {
+ return JNI_FALSE;
+ }
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "level", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ cinf->deflate.level = ENVPTR->GetIntField(ENVPAR ciobj, jf);
+ break;
+ case COMP_CODE_SZIP:
+ jc = ENVPTR->FindClass(ENVPAR "hdf/hdflib/HDFSZIPCompInfo");
+ if (jc == NULL) {
+ return JNI_FALSE;
+ }
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "bits_per_pixel", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+
+ cinf->szip.bits_per_pixel = ENVPTR->GetIntField(ENVPAR ciobj, jf);
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "options_mask", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ cinf->szip.options_mask = ENVPTR->GetIntField(ENVPAR ciobj, jf);
+
+/* changes from hdf-42r0 to hdf-42r1
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "compression_mode", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ cinf->szip.compression_mode = ENVPTR->GetIntField(ENVPAR ciobj, jf);
+*/
+
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "pixels", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ cinf->szip.pixels = ENVPTR->GetIntField(ENVPAR ciobj, jf);
+
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "pixels_per_block", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ cinf->szip.pixels_per_block = ENVPTR->GetIntField(ENVPAR ciobj, jf);
+
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "pixels_per_scanline", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ cinf->szip.pixels_per_scanline = ENVPTR->GetIntField(ENVPAR ciobj, jf);
+ break;
+ case COMP_CODE_NBIT:
+ jc = ENVPTR->FindClass(ENVPAR "hdf/hdflib/HDFNBITCompInfo");
+ if (jc == NULL) {
+ return JNI_FALSE;
+ }
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "nt", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ cinf->nbit.nt = ENVPTR->GetIntField(ENVPAR ciobj, jf);
+
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "sign_ext", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ cinf->nbit.sign_ext = ENVPTR->GetIntField(ENVPAR ciobj, jf);
+
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "fill_one", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ cinf->nbit.fill_one = ENVPTR->GetIntField(ENVPAR ciobj, jf);
+
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "start_bit", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ cinf->nbit.start_bit = ENVPTR->GetIntField(ENVPAR ciobj, jf);
+
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "bit_len", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ cinf->nbit.bit_len = ENVPTR->GetIntField(ENVPAR ciobj, jf);
+ break;
+ }
+
+ return JNI_TRUE;
+}
+
+/*
+ * Extract info from C comp_info struct, put in Java HDFCompInfo object.
+ *
+ * Put in the fields for each compression method.
+ */
+jboolean
+setNewCompInfo
+(JNIEnv *env, jobject ciobj, comp_coder_t coder, comp_info *cinf)
+{
+ jfieldID jf;
+ jclass jc;
+
+ jc = ENVPTR->FindClass(ENVPAR "hdf/hdflib/HDFCompInfo");
+
+ if (jc == NULL) {
+ return JNI_FALSE;
+ }
+ else {
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "ctype", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ ENVPTR->SetIntField(ENVPAR ciobj, jf, coder);
+ }
+
+
+ switch(coder) {
+ case COMP_CODE_NONE:
+ case COMP_CODE_RLE:
+ default:
+ break;
+ case COMP_CODE_SKPHUFF:
+ jc = ENVPTR->FindClass(ENVPAR "hdf/hdflib/HDFSKPHUFFCompInfo");
+ if (jc == NULL) {
+ return JNI_FALSE;
+ }
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "ctype", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ ENVPTR->SetIntField(ENVPAR ciobj, jf, COMP_CODE_SKPHUFF);
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "skp_size", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ ENVPTR->SetIntField(ENVPAR ciobj, jf, cinf->skphuff.skp_size );
+ break;
+
+ case COMP_CODE_DEFLATE:
+ jc = ENVPTR->FindClass(ENVPAR "hdf/hdflib/HDFDeflateCompInfo");
+ if (jc == NULL) {
+ return JNI_FALSE;
+ }
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "ctype", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ ENVPTR->SetIntField(ENVPAR ciobj, jf, COMP_CODE_DEFLATE);
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "level", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ ENVPTR->SetIntField(ENVPAR ciobj, jf, cinf->deflate.level );
+ break;
+ case COMP_CODE_SZIP:
+ jc = ENVPTR->FindClass(ENVPAR "hdf/hdflib/HDFSZIPCompInfo");
+ if (jc == NULL) {
+ return JNI_FALSE;
+ }
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "ctype", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ ENVPTR->SetIntField(ENVPAR ciobj, jf, COMP_CODE_SZIP);
+
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "bits_per_pixel", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ ENVPTR->SetIntField(ENVPAR ciobj, jf, cinf->szip.bits_per_pixel);
+
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "options_mask", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ ENVPTR->SetIntField(ENVPAR ciobj, jf, cinf->szip.options_mask);
+
+/* changes from hdf-42r0 to hdf-42r1
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "compression_mode", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ ENVPTR->SetIntField(ENVPAR ciobj, jf, cinf->szip.compression_mode);
+*/
+
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "pixels", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ ENVPTR->SetIntField(ENVPAR ciobj, jf, cinf->szip.pixels);
+
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "pixels_per_block", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ ENVPTR->SetIntField(ENVPAR ciobj, jf, cinf->szip.pixels_per_block);
+
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "pixels_per_scanline", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ ENVPTR->SetIntField(ENVPAR ciobj, jf, cinf->szip.pixels_per_scanline);
+ break;
+ case COMP_CODE_NBIT:
+ jc = ENVPTR->FindClass(ENVPAR "hdf/hdflib/HDFNBITCompInfo");
+ if (jc == NULL) {
+ return JNI_FALSE;
+ }
+
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "ctype", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ ENVPTR->SetIntField(ENVPAR ciobj, jf, COMP_CODE_NBIT);
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "nt", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ ENVPTR->SetIntField(ENVPAR ciobj, jf, cinf->nbit.nt);
+
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "sign_ext", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ ENVPTR->SetIntField(ENVPAR ciobj, jf, cinf->nbit.sign_ext );
+
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "fill_one", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ ENVPTR->SetIntField(ENVPAR ciobj, jf, cinf->nbit.fill_one);
+
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "start_bit", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ ENVPTR->SetIntField(ENVPAR ciobj, jf, cinf->nbit.start_bit );
+
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "bit_len", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ ENVPTR->SetIntField(ENVPAR ciobj, jf, cinf->nbit.bit_len);
+ break;
+ }
+
+ return JNI_TRUE;
+}
+
+
+/*
+ * Get info from old style C comp_info struct, put in HDFCompInfo object.
+ */
+jboolean
+getOldCompInfo
+(JNIEnv *env, jobject ciobj, comp_info *cinf)
+{
+ jfieldID jf;
+ jclass jc;
+ jint ctype;
+
+ jc = ENVPTR->FindClass(ENVPAR "hdf/hdflib/HDFOldCompInfo");
+ if (jc == NULL) {
+ return JNI_FALSE;
+ }
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "ctype", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ ctype = ENVPTR->GetIntField(ENVPAR ciobj, jf);
+
+ switch(ctype) {
+ case COMP_NONE:
+ case COMP_RLE:
+ case COMP_IMCOMP:
+ default:
+ break;
+
+ case COMP_JPEG:
+ jc = ENVPTR->FindClass(ENVPAR "hdf/hdflib/HDFJPEGCompInfo");
+ if (jc == NULL) {
+ return JNI_FALSE;
+ }
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "quality", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ cinf->jpeg.quality = ENVPTR->GetIntField(ENVPAR ciobj, jf);
+
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "force_baseline", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ cinf->jpeg.force_baseline = ENVPTR->GetIntField(ENVPAR ciobj, jf);
+ break;
+ }
+
+ return JNI_TRUE;
+}
+
+/*
+ * Get the Chunk info from C HDF_CHUNK_DEF struct, put in
+ * Java HDFChunkInfo object.
+ */
+jboolean
+getChunkInfo
+(JNIEnv *env, jobject chunkobj, HDF_CHUNK_DEF *cinf)
+{
+ jfieldID jf;
+ jclass jc;
+ jint ctype;
+ jobject larr;
+ jint * lens;
+ int i;
+ jboolean bval;
+ jboolean bb;
+
+ jc = ENVPTR->FindClass(ENVPAR "hdf/hdflib/HDFChunkInfo");
+ if (jc == NULL) {
+ return JNI_FALSE;
+ }
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "ctype", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ ctype = ENVPTR->GetIntField(ENVPAR chunkobj, jf);
+
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "chunk_lengths", "[I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ larr = ENVPTR->GetObjectField(ENVPAR chunkobj,jf);
+ if (larr == NULL) {
+ return JNI_FALSE;
+ }
+
+ lens = (jint *)ENVPTR->GetIntArrayElements(ENVPAR (jintArray)larr,&bb);
+
+ for (i = 0; i < MAX_VAR_DIMS; i++) {
+ cinf->comp.chunk_lengths[i] = (int32)lens[i];
+ }
+
+ ENVPTR->ReleaseIntArrayElements(ENVPAR (jintArray)larr,(jint *)lens,JNI_ABORT);
+
+ if (ctype == (HDF_CHUNK | HDF_COMP)) {
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "comp_type", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ cinf->comp.comp_type = ENVPTR->GetIntField(ENVPAR chunkobj, jf);
+
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "cinfo", "Lhdf/hdflib/HDFCompInfo;");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ larr = ENVPTR->GetObjectField(ENVPAR chunkobj,jf);
+ if (larr == NULL) {
+ return JNI_FALSE;
+ }
+
+ /* set compression information */
+ bval = getNewCompInfo(env, (jobject)larr, &(cinf->comp.cinfo));
+
+ }
+ else if (ctype == (HDF_CHUNK | HDF_NBIT)) {
+ jc = ENVPTR->FindClass(ENVPAR "hdf/hdflib/HDFNBITChunkInfo");
+ if (jc == NULL) {
+ return JNI_FALSE;
+ }
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "chunk_lengths", "[I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ larr = ENVPTR->GetObjectField(ENVPAR chunkobj,jf);
+ if (larr == NULL) {
+ return JNI_FALSE;
+ }
+
+ lens = (jint *)ENVPTR->GetIntArrayElements(ENVPAR (jintArray)larr,&bb);
+
+ for (i = 0; i < MAX_VAR_DIMS; i++) {
+ cinf->nbit.chunk_lengths[i] = (int32)lens[i];
+ }
+
+ ENVPTR->ReleaseIntArrayElements(ENVPAR (jintArray)larr,(jint *)lens,JNI_ABORT);
+
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "start_bit", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ cinf->nbit.start_bit = ENVPTR->GetIntField(ENVPAR chunkobj, jf);
+
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "bit_len", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ cinf->nbit.bit_len = ENVPTR->GetIntField(ENVPAR chunkobj, jf);
+
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "sign_ext", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ cinf->nbit.sign_ext = ENVPTR->GetIntField(ENVPAR chunkobj, jf);
+
+ jf = ENVPTR->GetFieldID(ENVPAR jc, "fill_one", "I");
+ if (jf == NULL) {
+ return JNI_FALSE;
+ }
+ cinf->nbit.fill_one = ENVPTR->GetIntField(ENVPAR chunkobj, jf);
+ }
+
+ return JNI_TRUE;
+}
+
+/*
+ * Fill the Java HDFChunkInfo object from the C HDF_CHUNK_DEF struct.
+ *
+ * Determine the compression method, and create an appropriate subclass
+ * of HDFCompInfo. Then call the constructor for HDFChunkInfo.
+ */
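+/*
+ * For reference, the constructor/JNI-signature pairs used below, derived
+ * from the GetMethodID and NewObject/CallVoidMethod calls in this function
+ * (parameter names are illustrative only):
+ *
+ *   HDFCompInfo()                                              "()V"
+ *   HDFJPEGCompInfo(int quality, int force_baseline)           "(II)V"
+ *   HDFDeflateCompInfo(int level)                              "(I)V"
+ *   HDFSZIPCompInfo(int bits_per_pixel, int options_mask,
+ *                   int pixels, int pixels_per_block,
+ *                   int pixels_per_scanline)                   "(IIIII)V"
+ *   HDFNBITChunkInfo(int[] lengths, int start_bit, int bit_len,
+ *                    int sign_ext, int fill_one)               "([IIIII)V"
+ *   HDFChunkInfo(int[] lengths, int comp_type, HDFCompInfo ci) "([IILhdf/hdflib/HDFCompInfo;)V"
+ */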
+jboolean
+makeChunkInfo
+(JNIEnv *env, jobject chunkobj, int32 flgs, HDF_CHUNK_DEF *cinf)
+{
+ jclass jc;
+ jclass jci;
+ jmethodID jmi;
+ jintArray rarray;
+ jobject compinfo;
+
+ rarray = ENVPTR->NewIntArray(ENVPAR MAX_VAR_DIMS);
+ if (rarray == NULL) {
+ return JNI_FALSE;
+ }
+ ENVPTR->SetIntArrayRegion(ENVPAR rarray,0,MAX_VAR_DIMS,(jint *)cinf->chunk_lengths);
+
+ /* rarray is a JNI local reference; it is released automatically when this native method returns */
+
+ jci = ENVPTR->FindClass(ENVPAR "hdf/hdflib/HDFChunkInfo");
+ if (jci == NULL) {
+ return JNI_FALSE;
+ }
+
+ switch (flgs) {
+ case HDF_CHUNK:
+ default:
+ jc = ENVPTR->FindClass(ENVPAR "hdf/hdflib/HDFCompInfo");
+ if (jc == NULL) {
+ return JNI_FALSE;
+ }
+ jmi = ENVPTR->GetMethodID(ENVPAR jc, "<init>", "()V");
+ if (jmi == NULL) {
+ return JNI_FALSE;
+ }
+ compinfo = ENVPTR->NewObject(ENVPAR jc,jmi);
+ break;
+ case (HDF_CHUNK | HDF_COMP):
+ switch (cinf->comp.comp_type) {
+ case COMP_CODE_NONE:
+ default:
+ jc = ENVPTR->FindClass(ENVPAR "hdf/hdflib/HDFCompInfo");
+ if (jc == NULL) {
+ return JNI_FALSE;
+ }
+ jmi = ENVPTR->GetMethodID(ENVPAR jc, "<init>", "()V");
+ if (jmi == NULL) {
+ return JNI_FALSE;
+ }
+ compinfo = ENVPTR->NewObject(ENVPAR jc,jmi);
+ break;
+ case COMP_CODE_JPEG:
+ /* new HDFJPEGCompInfo() */
+ jc = ENVPTR->FindClass(ENVPAR "hdf/hdflib/HDFJPEGCompInfo");
+ if (jc == NULL) {
+ return JNI_FALSE;
+ }
+ jmi = ENVPTR->GetMethodID(ENVPAR jc, "<init>", "(II)V");
+ if (jmi == NULL) {
+ return JNI_FALSE;
+ }
+ compinfo = ENVPTR->NewObject(ENVPAR jc,jmi,
+ cinf->comp.cinfo.jpeg.quality,
+ cinf->comp.cinfo.jpeg.force_baseline);
+ break;
+ case COMP_CODE_DEFLATE:
+ jc = ENVPTR->FindClass(ENVPAR "hdf/hdflib/HDFDeflateCompInfo");
+ if (jc == NULL) {
+ return JNI_FALSE;
+ }
+ jmi = ENVPTR->GetMethodID(ENVPAR jc, "<init>", "(I)V");
+ if (jmi == NULL) {
+ return JNI_FALSE;
+ }
+ compinfo = ENVPTR->NewObject(ENVPAR jc,jmi, cinf->comp.cinfo.deflate.level);
+ break;
+ case COMP_CODE_SZIP:
+ jc = ENVPTR->FindClass(ENVPAR "hdf/hdflib/HDFSZIPCompInfo");
+ if (jc == NULL) {
+ return JNI_FALSE;
+ }
+ jmi = ENVPTR->GetMethodID(ENVPAR jc, "<init>", "(IIIII)V");
+ if (jmi == NULL) {
+ return JNI_FALSE;
+ }
+ compinfo = ENVPTR->NewObject(ENVPAR jc,jmi,
+ cinf->comp.cinfo.szip.bits_per_pixel,
+ cinf->comp.cinfo.szip.options_mask,
+ cinf->comp.cinfo.szip.pixels,
+ cinf->comp.cinfo.szip.pixels_per_block,
+ cinf->comp.cinfo.szip.pixels_per_scanline);
+ break;
+ }
+ break;
+ case (HDF_CHUNK | HDF_NBIT):
+ /* invoke the HDFNBITChunkInfo constructor on chunkobj */
+ jc = ENVPTR->FindClass(ENVPAR "hdf/hdflib/HDFNBITChunkInfo");
+ if (jc == NULL) {
+ return JNI_FALSE;
+ }
+ jmi = ENVPTR->GetMethodID(ENVPAR jc, "<init>",
+ "([IIIII;)V");
+ if (jmi == NULL) {
+ return JNI_FALSE;
+ }
+ ENVPTR->CallVoidMethod(ENVPAR chunkobj,jmi, rarray,
+ cinf->nbit.start_bit,
+ cinf->nbit.bit_len,
+ cinf->nbit.sign_ext,
+ cinf->nbit.fill_one);
+ return JNI_TRUE;
+ break;
+ }
+
+ jmi = ENVPTR->GetMethodID(ENVPAR jci, "<init>",
+ "([IILhdf/hdflib/HDFCompInfo;)V");
+ if (jmi == NULL) {
+ return JNI_FALSE;
+ }
+ ENVPTR->CallVoidMethod(ENVPAR chunkobj,jmi, rarray, cinf->comp.comp_type,
+ compinfo);
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_HCget_1config_1info
+(JNIEnv *env, jclass clss, jint coder_type)
+{
+ intn rval;
+ uint32 compression_config_info=0;
+
+ /* check for success... */
+ rval = HCget_config_info( (comp_coder_t) coder_type, (uint32*)&compression_config_info);
+
+ if (rval == FAIL)
+ return -1;
+
+ return compression_config_info;
+}
+
+
+#ifdef __cplusplus
+}
+#endif
+
diff --git a/java/src/jni/hdfvdataImp.c b/java/src/jni/hdfvdataImp.c
new file mode 100644
index 0000000..1ee10a2
--- /dev/null
+++ b/java/src/jni/hdfvdataImp.c
@@ -0,0 +1,1077 @@
+
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+/*
+ * This code is the C-interface called by Java programs to access the
+ * HDF 4 library.
+ *
+ * Each routine wraps a single HDF entry point, generally with the
+ * analogous arguments and return codes.
+ *
+ */
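+/*
+ * A minimal, hedged sketch of how these wrappers are reached from Java,
+ * assuming the usual static native declarations in hdf.hdflib.HDFLibrary
+ * (types follow the JNI signatures in this file; variable names and sizes
+ * are illustrative; error handling omitted):
+ *
+ *   // long vdata_id = HDFLibrary.VSattach(file_id, vdata_ref, "r");
+ *   // HDFLibrary.VSsetfields(vdata_id, "lat,lon");
+ *   // byte[] buf = new byte[recsize * nrecords];
+ *   // int nread = HDFLibrary.VSread(vdata_id, buf, nrecords, 0 /* full interlace */);
+ *   // HDFLibrary.VSdetach(vdata_id);
+ */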
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+#include "hdf.h"
+#include "jni.h"
+#include "h4jni.h"
+
+JNIEXPORT jlong JNICALL
+Java_hdf_hdflib_HDFLibrary_VSattach
+(JNIEnv *env, jclass clss, jlong fid, jint vdata_ref, jstring accessmode)
+{
+ int32 retVal;
+ const char *access;
+ HFILEID id = (HFILEID)fid;
+
+ PIN_JAVA_STRING(accessmode, access);
+
+ if (access != NULL) {
+ /* attach to the vdata specified by vdata_ref */
+ retVal = VSattach(id, (int32)vdata_ref, access);
+
+ UNPIN_JAVA_STRING(accessmode, access);
+
+ if (retVal == FAIL)
+ CALL_ERROR_CHECK();
+ }
+
+ return (jlong)retVal;
+}
+
+JNIEXPORT void JNICALL
+Java_hdf_hdflib_HDFLibrary_VSdetach
+(JNIEnv *env, jclass clss, jlong vdata_id)
+{
+ int32 retVal;
+
+ retVal = VSdetach((int32)vdata_id);
+ if (retVal == FAIL)
+ CALL_ERROR_CHECK();
+}
+
+JNIEXPORT jlong JNICALL
+Java_hdf_hdflib_HDFLibrary_VSgetid
+(JNIEnv *env, jclass clss, jlong fid, jint vdata_ref)
+{
+ int32 retVal;
+ HFILEID id = (HFILEID)fid;
+
+ retVal = VSgetid(id, (int32)vdata_ref);
+ if (retVal == FAIL)
+ CALL_ERROR_CHECK();
+
+ return (jlong)retVal;
+}
+
+JNIEXPORT void JNICALL
+Java_hdf_hdflib_HDFLibrary_VSgetclass
+(JNIEnv *env, jclass clss, jlong vdata_id, jobjectArray hdfclass)
+{
+ char *className;
+ jstring rstring;
+ jclass jc;
+ jobject o;
+ jboolean bb;
+
+ if (hdfclass == NULL) {
+ h4nullArgument(env, "VSgetclass: hdfclass is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR hdfclass) < 1) {
+ h4badArgument(env, "VSgetclass: output array hdfclass < order 1");
+ } /* end else if */
+ else {
+ className = (char *)HDmalloc(VSNAMELENMAX+1);
+ if (className == NULL) {
+ h4outOfMemory(env, "VSgetclass");
+ } /* end if */
+ else {
+ /* get the class name of the vdata */
+ if(VSgetclass((int32)vdata_id, className) < 0) {
+ CALL_ERROR_CHECK();
+ } /* end if */
+ else {
+ className[VSNAMELENMAX] = '\0';
+
+ /* convert it to java string */
+ rstring = ENVPTR->NewStringUTF(ENVPAR className);
+
+ o = ENVPTR->GetObjectArrayElement(ENVPAR hdfclass,0);
+ if (o == NULL) {
+ HDfree(className);
+ return;
+ }
+ jc = ENVPTR->FindClass(ENVPAR "java/lang/String");
+ if (jc == NULL) {
+ HDfree(className);
+ return;
+ }
+ bb = ENVPTR->IsInstanceOf(ENVPAR o,jc);
+ if (bb == JNI_FALSE) {
+ HDfree(className);
+ return;
+ }
+ ENVPTR->SetObjectArrayElement(ENVPAR hdfclass,0,(jobject)rstring);
+ ENVPTR->DeleteLocalRef(ENVPAR o);
+ } /* end else */
+
+ HDfree(className);
+ } /* end else */
+ } /* end else */
+}
+
+JNIEXPORT void JNICALL
+Java_hdf_hdflib_HDFLibrary_VSgetname
+(JNIEnv *env, jclass clss, jlong vdata_id, jobjectArray hdfname)
+{
+ char nameName[VSNAMELENMAX+1];
+ jstring rstring;
+ jclass jc;
+ jobject o;
+ jboolean bb;
+
+ if (hdfname == NULL) {
+ h4nullArgument(env, "VSgetname: hdfname is NULL");
+ } /* end if */
+ else {
+ /* get the name of the vdata */
+ if(VSgetname((int32)vdata_id, nameName) < 0) {
+ CALL_ERROR_CHECK();
+ } /* end if */
+ else {
+ nameName[VSNAMELENMAX]='\0';
+
+ /* convert it to java string */
+ rstring = ENVPTR->NewStringUTF(ENVPAR nameName);
+
+ o = ENVPTR->GetObjectArrayElement(ENVPAR hdfname,0);
+ if (o == NULL) {
+ return;
+ }
+ jc = ENVPTR->FindClass(ENVPAR "java/lang/String");
+ if (jc == NULL) {
+ return;
+ }
+ bb = ENVPTR->IsInstanceOf(ENVPAR o,jc);
+ if (bb == JNI_FALSE) {
+ return;
+ }
+ ENVPTR->SetObjectArrayElement(ENVPAR hdfname,0,(jobject)rstring);
+ ENVPTR->DeleteLocalRef(ENVPAR o);
+ } /* end else */
+ } /* end else */
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_VSelts
+(JNIEnv *env, jclass clss, jlong vdata_id)
+{
+ int32 retVal;
+
+ retVal = VSelts((int32)vdata_id);
+ if (retVal == FAIL)
+ CALL_ERROR_CHECK();
+
+ return (jint)retVal;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_VSfdefine
+(JNIEnv *env, jclass clss, jlong vdata_id, jstring fieldname, jint numbertype, jint order)
+{
+ int32 retVal;
+ const char *fldName;
+
+ PIN_JAVA_STRING(fieldname, fldName);
+
+ if (fldName != NULL) {
+ retVal = VSfdefine((int32)vdata_id,(char *)fldName,(int32)numbertype,(int32)order);
+
+ UNPIN_JAVA_STRING(fieldname, fldName);
+
+ if (retVal == FAIL)
+ CALL_ERROR_CHECK();
+ }
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_VSfexist
+(JNIEnv *env, jclass clss, jlong vdata_id, jstring fieldname)
+{
+ int32 retVal;
+ const char *fldName;
+
+ PIN_JAVA_STRING(fieldname, fldName);
+
+ if (fldName != NULL) {
+ /* Check the fields */
+ retVal = VSfexist((int32)vdata_id,(char *)fldName);
+
+ UNPIN_JAVA_STRING(fieldname, fldName);
+
+ if (retVal == FAIL)
+ CALL_ERROR_CHECK();
+ }
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_VSfind
+(JNIEnv *env, jclass clss, jlong vdata_id, jstring fieldname)
+{
+ int32 retVal;
+ const char *fldName;
+
+ PIN_JAVA_STRING(fieldname, fldName);
+
+ if (fldName != NULL) {
+ /* find the vdata with the given name */
+ retVal = VSfind((int32)vdata_id,(char *)fldName);
+
+ UNPIN_JAVA_STRING(fieldname, fldName);
+
+ if (retVal == FAIL)
+ CALL_ERROR_CHECK();
+ }
+
+ return(retVal);
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_VSsetblocksize
+(JNIEnv *env, jclass clss, jlong vdata_id, jint blocksize)
+{
+ int32 retVal;
+
+ /* set the block size for the vdata */
+ retVal = VSsetblocksize((int32)vdata_id,(int32)blocksize);
+
+ if (retVal == FAIL)
+ CALL_ERROR_CHECK();
+
+ return(retVal);
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_VSsetnumblocks
+(JNIEnv *env, jclass clss, jlong vdata_id, jint numblocks)
+{
+ int32 retVal;
+
+ /* set the number of blocks for the vdata */
+ retVal = VSsetnumblocks((int32)vdata_id,(int32)numblocks);
+
+ if (retVal == FAIL)
+ CALL_ERROR_CHECK();
+
+ return(retVal);
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_VSgetfields
+(JNIEnv *env, jclass clss, jlong vdata_id, jobjectArray fields)
+{
+ int retVal;
+ jclass jc;
+ char *flds;
+ jstring rstring;
+ jobject o;
+ jboolean bb;
+
+ if (fields == NULL) {
+ h4nullArgument(env, "VSgetfields: fields is NULL");
+ } /* end else if */
+ else {
+ flds = (char *)HDmalloc(25600);
+
+ if (flds == NULL) {
+ h4outOfMemory(env, "VSgetfields");
+ } /* end if */
+ else {
+ /* get the fields name in the vdata */
+ retVal = VSgetfields((int32)vdata_id, flds);
+
+ flds[25599] = '\0';
+
+ if (retVal == FAIL) {
+ CALL_ERROR_CHECK();
+ } /* end if */
+ else {
+ rstring = ENVPTR->NewStringUTF(ENVPAR flds);
+ o = ENVPTR->GetObjectArrayElement(ENVPAR fields,0);
+ if (o == NULL) {
+ HDfree(flds);
+ return FAIL;
+ }
+ jc = ENVPTR->FindClass(ENVPAR "java/lang/String");
+ if (jc == NULL) {
+ HDfree(flds);
+ return FAIL;
+ }
+ bb = ENVPTR->IsInstanceOf(ENVPAR o,jc);
+ if (bb == JNI_FALSE) {
+ HDfree(flds);
+ return FAIL;
+ }
+ ENVPTR->SetObjectArrayElement(ENVPAR fields,0,(jobject)rstring);
+ ENVPTR->DeleteLocalRef(ENVPAR o);
+ } /* end else */
+
+ HDfree(flds);
+ } /* end else */
+ } /* end else */
+
+ return retVal;
+}
+
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_VSgetinterlace
+(JNIEnv *env, jclass clss, jlong vdata_id)
+{
+ int32 retVal;
+
+ retVal = VSgetinterlace((int32)vdata_id);
+ if (retVal == FAIL)
+ CALL_ERROR_CHECK();
+
+ return (jint)retVal;
+}
+
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_VSinquire
+(JNIEnv *env, jclass clss, jlong vdata_id, jintArray iargs, jobjectArray sargs)
+{
+ intn rval;
+ char *flds;
+ char *name;
+ jclass jc;
+ jstring rstring;
+ jint * theIargs;
+ jboolean bb;
+ jobject o;
+
+
+ if (iargs == NULL) {
+ h4nullArgument(env, "VSinquire: iargs is NULL");
+ } /* end if */
+ else if (sargs == NULL) {
+ h4nullArgument(env, "VSinquire: sargs is NULL");
+ } /* end else if */
+ else if (ENVPTR->GetArrayLength(ENVPAR iargs) < 3) {
+ h4badArgument(env, "VSinquire: output array iargs < order 3");
+ } /* end else if */
+ else if (ENVPTR->GetArrayLength(ENVPAR sargs) < 2) {
+ h4badArgument(env, "VSinquire: output array sargs < order 2");
+ } /* end else if */
+ else {
+ flds = (char *)HDmalloc(MAX_FIELD_SIZE+1);
+
+ if (flds == NULL) {
+ h4outOfMemory(env, "VSinquire");
+ } /* end if */
+ else {
+ name = (char *)HDmalloc(MAX_NC_NAME+1);
+
+ if (name == NULL) {
+ h4outOfMemory(env, "VSinquire");
+ } /* end if */
+ else {
+ theIargs = ENVPTR->GetIntArrayElements(ENVPAR iargs,&bb);
+
+ if (theIargs == NULL) {
+ h4JNIFatalError(env, "VSinquire: iargs not pinned");
+ } /* end if */
+ else {
+ rval = VSinquire((int32) vdata_id, (int32 *)&(theIargs[0]),
+ (int32 *)&(theIargs[1]), flds, (int32 *)&(theIargs[2]), name);
+
+ flds[MAX_FIELD_SIZE] = '\0';
+ name[MAX_NC_NAME] = '\0';
+
+ if (rval == FAIL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR iargs,theIargs,JNI_ABORT);
+ CALL_ERROR_CHECK();
+ } /* end if */
+ else {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR iargs,theIargs,0);
+
+ jc = ENVPTR->FindClass(ENVPAR "java/lang/String");
+ if (jc == NULL) {
+ HDfree(flds);
+ HDfree(name);
+ return JNI_FALSE;
+ }
+ o = ENVPTR->GetObjectArrayElement(ENVPAR sargs,0);
+ if (o == NULL) {
+ HDfree(flds);
+ HDfree(name);
+ return JNI_FALSE;
+ }
+ bb = ENVPTR->IsInstanceOf(ENVPAR o,jc);
+ if (bb == JNI_FALSE) {
+ HDfree(flds);
+ HDfree(name);
+ return JNI_FALSE;
+ }
+ rstring = ENVPTR->NewStringUTF(ENVPAR flds);
+ ENVPTR->SetObjectArrayElement(ENVPAR sargs,0,(jobject)rstring);
+ ENVPTR->DeleteLocalRef(ENVPAR o);
+
+ o = ENVPTR->GetObjectArrayElement(ENVPAR sargs,1);
+ if (o == NULL) {
+ HDfree(flds);
+ HDfree(name);
+ return JNI_FALSE;
+ }
+ bb = ENVPTR->IsInstanceOf(ENVPAR o,jc);
+ if (bb == JNI_FALSE) {
+ HDfree(flds);
+ HDfree(name);
+ return JNI_FALSE;
+ }
+ rstring = ENVPTR->NewStringUTF(ENVPAR name);
+ ENVPTR->SetObjectArrayElement(ENVPAR sargs,1,(jobject)rstring);
+ ENVPTR->DeleteLocalRef(ENVPAR o);
+ }
+ } /* end else */
+
+ HDfree(name);
+ } /* end else */
+
+ HDfree(flds);
+ } /* end else */
+ } /* end else */
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_VSgetblockinfo
+(JNIEnv *env, jclass clss, jlong vdata_id, jintArray iargs)
+{
+ intn rval;
+ jint * theIargs;
+ jboolean bb;
+
+ if (iargs == NULL) {
+ h4nullArgument(env, "VSgetblockinfo: iargs is NULL");
+ } /* end if */
+ else {
+ theIargs = ENVPTR->GetIntArrayElements(ENVPAR iargs,&bb);
+
+ if (theIargs == NULL) {
+ h4JNIFatalError(env, "VSgetblockinfo: iargs not pinned");
+ } /* end if */
+ else {
+ rval = VSgetblockinfo((int32) vdata_id, (int32 *)&(theIargs[0]),
+ (int32 *)&(theIargs[1]));
+
+ if (rval == FAIL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR iargs,theIargs,JNI_ABORT);
+ CALL_ERROR_CHECK();
+ }
+ else {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR iargs,theIargs,0);
+ }
+ } /* end else */
+ } /* end else */
+
+ return JNI_TRUE;
+}
+
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_VSlone
+(JNIEnv *env, jclass clss, jlong fid, jintArray ref_array, jint arraysize)
+{
+ int retVal;
+ jint * arr;
+ jboolean bb;
+
+ if (ref_array == NULL) {
+ h4nullArgument(env, "VSlone: ref_array is NULL");
+ } /* end if */
+ else {
+ arr = ENVPTR->GetIntArrayElements(ENVPAR ref_array,&bb);
+
+ if (arr == NULL) {
+ h4JNIFatalError(env, "VSlone: ref_array not pinned");
+ } /* end if */
+ else {
+ /* get the reference numbers of the lone vdatas in the file */
+ retVal = VSlone((int32)fid, (int32 *)arr, (int32)arraysize);
+
+ if (retVal == FAIL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR ref_array,arr,JNI_ABORT);
+ CALL_ERROR_CHECK();
+ } /* end if */
+ else {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR ref_array,arr,0);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return retVal;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_VSread
+(JNIEnv *env, jclass clss, jlong vdata_id, jbyteArray databuf, jint nrecords, jint interlace)
+{
+ int32 retVal;
+ jbyte * data;
+ jboolean bb;
+
+ if (databuf == NULL) {
+ h4nullArgument(env, "VSread: databuf is NULL");
+ } /* end if */
+ else {
+ data = (jbyte *)ENVPTR->GetPrimitiveArrayCritical(ENVPAR databuf,&bb);
+
+ if (data == NULL) {
+ h4JNIFatalError(env, "VSread: databuf not pinned");
+ } /* end if */
+ else {
+ /* read the requested records into the buffer */
+ retVal = VSread((int32)vdata_id, (unsigned char *)data, nrecords, interlace);
+
+ if (retVal == FAIL) {
+ ENVPTR->ReleasePrimitiveArrayCritical(ENVPAR databuf,data,JNI_ABORT);
+ CALL_ERROR_CHECK();
+ }
+ else {
+ ENVPTR->ReleasePrimitiveArrayCritical(ENVPAR databuf,data,0);
+ }
+ } /* end else */
+ } /* end else */
+
+ return retVal;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_VSseek
+(JNIEnv *env, jclass clss, jlong vdata_id, jint nrecord)
+{
+ int32 retVal;
+
+ retVal = VSseek((int32)vdata_id, (int32)nrecord);
+ if (retVal == FAIL)
+ CALL_ERROR_CHECK();
+
+ return (jint)retVal;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_VSsetfields
+(JNIEnv *env, jclass clss, jlong vdata_id, jstring fields)
+{
+ int32 retVal;
+ const char *fldPtr;
+
+ PIN_JAVA_STRING(fields, fldPtr);
+
+ if (fldPtr != NULL) {
+ /* set the vdata fields to read */
+ retVal = VSsetfields((int32)vdata_id, (char *)fldPtr);
+
+ UNPIN_JAVA_STRING(fields, fldPtr);
+
+ if (retVal == FAIL)
+ CALL_ERROR_CHECK();
+ }
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_VSsetinterlace
+(JNIEnv *env, jclass clss, jlong vdata_id, jint interlace)
+{
+ int32 retVal;
+
+ /* set the interlace for Vdata */
+ retVal = VSsetinterlace((int32)vdata_id, (int32)interlace);
+ if (retVal == FAIL)
+ CALL_ERROR_CHECK();
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_VSsizeof
+(JNIEnv *env, jclass clss, jlong vdata_id, jstring fields)
+{
+ int32 retVal;
+ const char *fldPtr;
+
+ PIN_JAVA_STRING(fields, fldPtr);
+
+ if (fldPtr != NULL) {
+ /* get the size of a Vdata */
+ retVal = VSsizeof((int32)vdata_id, (char *)fldPtr);
+
+ UNPIN_JAVA_STRING(fields, fldPtr);
+
+ if (retVal == FAIL)
+ CALL_ERROR_CHECK();
+ }
+
+ return(retVal);
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_VSappendable
+(JNIEnv *env, jclass clss, jint vkey, jint block_size)
+{
+ int32 rval;
+
+ rval = VSappendable((int32) vkey, (int32) block_size);
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT void JNICALL
+Java_hdf_hdflib_HDFLibrary_VSdump
+(JNIEnv *env, jclass clss, jint vkey)
+{
+ VSdump((int32) vkey);
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_VSfindclass
+(JNIEnv *env, jclass clss, jlong vdata_id, jstring hdfclassname)
+{
+ int32 rval;
+ const char *string;
+
+ PIN_JAVA_STRING(hdfclassname, string);
+
+ if (string != NULL) {
+ /* find the vdata with the given class name */
+ rval = VSfindclass((int32) vdata_id, string);
+
+ UNPIN_JAVA_STRING(hdfclassname, string);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+ }
+
+ return rval;
+
+}
+
+
+/* VSfpack is not implemented: mapping the per-field VOIDP bufptrs[] onto Java 2-D arrays is not yet supported. */
+JNIEXPORT void JNICALL
+Java_hdf_hdflib_HDFLibrary_VSfpack
+(JNIEnv *env, jclass clss, jlong vdata_id, jint action, jstring fields_in_buf,
+ jarray buf, jint bufsize, jstring fields, jarray bufptrs)
+{
+
+ /*
+ VSfpack((int32) vdata_id, (intn) action, char
+ *fields_in_buf, VOIDP buf, intn buf_size, intn
+ n_records, char *fields, VOIDP bufptrs[]);
+ */
+ h4NotImplemented(env, "VSfpack");
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_VSgetversion
+(JNIEnv *env, jclass clss, jint key)
+{
+ int32 retVal;
+
+ retVal = VSgetversion((int32) key);
+ if (retVal <= 0)
+ CALL_ERROR_CHECK();
+
+ return (jint)retVal;
+}
+
+JNIEXPORT void JNICALL
+Java_hdf_hdflib_HDFLibrary_VSsetclass
+(JNIEnv *env, jclass clss, jlong vdata_id, jstring vdata_class)
+{
+ int32 retVal;
+ const char *string;
+
+ PIN_JAVA_STRING(vdata_class, string);
+
+ if (string != NULL) {
+ retVal = VSsetclass((int32) vdata_id, (char *)string);
+
+ UNPIN_JAVA_STRING(vdata_class, string);
+
+ if (retVal == FAIL)
+ CALL_ERROR_CHECK();
+ }
+
+ return;
+}
+
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_VSsetexternalfile
+(JNIEnv *env, jclass clss, jint vkey, jstring filename, jint offset)
+{
+ intn rval;
+ const char *string;
+
+ PIN_JAVA_STRING(filename, string);
+
+ if (string != NULL) {
+ rval = VSsetexternalfile((int32) vkey, (char *)string, (int32) offset);
+
+ UNPIN_JAVA_STRING(filename, string);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+ }
+
+ return JNI_TRUE;
+}
+
+
+JNIEXPORT void JNICALL
+Java_hdf_hdflib_HDFLibrary_VSsetname
+(JNIEnv *env, jclass clss, jlong vdata_id, jstring vdata_name)
+{
+ int32 retVal;
+ const char *string;
+
+ PIN_JAVA_STRING(vdata_name, string);
+
+ if (string != NULL) {
+ retVal = VSsetname((int32) vdata_id, (char *)string);
+
+ UNPIN_JAVA_STRING(vdata_name, string);
+
+ if (retVal == FAIL)
+ CALL_ERROR_CHECK();
+ }
+
+ return;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_VSwrite
+(JNIEnv *env, jclass clss, jlong vdata_id, jbyteArray databuf, jint n_records, jint interlace)
+{
+ int32 rval;
+ jbyte *b;
+ jboolean bb;
+
+ if (databuf == NULL) {
+ h4nullArgument(env, "VSwrite: databuf is NULL");
+ } /* end if */
+ else {
+ b = ENVPTR->GetByteArrayElements(ENVPAR databuf,&bb);
+
+ if (b == NULL) {
+ h4JNIFatalError(env, "VSwrite: databuf not pinned");
+ } /* end if */
+ else {
+ rval = VSwrite((int32) vdata_id, (unsigned char *)b, (int32) n_records, (int32) interlace);
+
+ if (rval == FAIL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR databuf,b,JNI_ABORT);
+ CALL_ERROR_CHECK();
+ }
+ else {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR databuf,b,0);
+ }
+ } /* end else */
+ } /* end else */
+
+ return rval;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_VSattrinfo
+(JNIEnv *env, jclass clss, jlong id, jint index, jint attr_index, jobjectArray name, jintArray argv)
+{
+ int32 retVal;
+ jint *theArgs;
+ jboolean bb;
+ jclass Sjc;
+ jstring str;
+ jobject o;
+ char nam[256]; /* what is the correct constant??? */
+
+ if (name == NULL) {
+ h4nullArgument(env, "VSattrinfo: name is NULL");
+ } /* end if */
+ else if (argv == NULL) {
+ h4nullArgument(env, "VSattrinfo: argv is NULL");
+ } /* end else if */
+ else if (ENVPTR->GetArrayLength(ENVPAR name) < 1) {
+ h4badArgument(env, "VSattrinfo: output array name < order 1");
+ } /* end else if */
+ else if (ENVPTR->GetArrayLength(ENVPAR argv) < 3) {
+ h4badArgument(env, "VSattrinfo: output array argv < order 3");
+ } /* end else if */
+ else {
+ theArgs = ENVPTR->GetIntArrayElements(ENVPAR argv,&bb);
+
+ if (theArgs == NULL) {
+ h4JNIFatalError(env, "VSattrinfo: argv not pinned");
+ } /* end if */
+ else {
+ retVal = VSattrinfo((int32)id, (int32)index, (int32)attr_index,
+ nam,
+ (int32 *)&(theArgs[0]), (int32 *)&(theArgs[1]),
+ (int32 *)&(theArgs[2]));
+
+ nam[255] = '\0';
+
+ if (retVal == FAIL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR argv,theArgs,JNI_ABORT);
+ CALL_ERROR_CHECK();
+ }
+ else {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR argv,theArgs,0);
+
+ str = ENVPTR->NewStringUTF(ENVPAR nam);
+ o = ENVPTR->GetObjectArrayElement(ENVPAR name,0);
+ if (o == NULL) {
+ return JNI_FALSE;
+ }
+ Sjc = ENVPTR->FindClass(ENVPAR "java/lang/String");
+ if (Sjc == NULL) {
+ return JNI_FALSE;
+ }
+ bb = ENVPTR->IsInstanceOf(ENVPAR o,Sjc);
+ if (bb == JNI_FALSE) {
+ return JNI_FALSE;
+ }
+ ENVPTR->SetObjectArrayElement(ENVPAR name,0,(jobject)str);
+ ENVPTR->DeleteLocalRef(ENVPAR o);
+ }
+ } /* end else */
+ } /* end else */
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_VSfindex
+(JNIEnv *env, jclass clss, jlong id, jstring name, jintArray findex)
+{
+ intn rval;
+ const char *str;
+ int copyMode;
+ jint *arr;
+ jboolean bb;
+
+ copyMode = JNI_ABORT;
+
+ if (findex == NULL) {
+ h4nullArgument(env, "VSfindex: findex is NULL");
+ } /* end if */
+ else {
+ arr = ENVPTR->GetIntArrayElements(ENVPAR findex,&bb);
+
+ if (arr == NULL) {
+ h4JNIFatalError(env, "VSfindex: findex not pinned");
+ } /* end if */
+ else {
+ PIN_JAVA_STRING(name, str);
+
+ if (str != NULL) {
+ rval = VSfindex((int32) id, str, (int32 *)arr);
+
+ UNPIN_JAVA_STRING(name, str);
+
+ if (rval == FAIL) {
+ CALL_ERROR_CHECK();
+ }
+ else {
+ copyMode = 0;
+ }
+ } /* end if */
+
+ ENVPTR->ReleaseIntArrayElements(ENVPAR findex,arr,copyMode);
+ } /* end else */
+ } /* end else */
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_VSfindattr
+(JNIEnv *env, jclass clss, jlong id, jint index, jstring name)
+{
+ int32 retVal;
+ const char *cname;
+
+ PIN_JAVA_STRING(name, cname);
+
+ if (cname != NULL) {
+ retVal = VSfindattr((int32)id, (int32)index, cname);
+
+ UNPIN_JAVA_STRING(name, cname);
+
+ if (retVal == FAIL)
+ CALL_ERROR_CHECK();
+ }
+
+ return retVal;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_VSfnattrs
+(JNIEnv *env, jclass clss, jlong id, jint attr)
+{
+ intn retVal;
+
+ retVal = VSfnattrs((int32)id, (int32)attr);
+ if (retVal <= 0)
+ CALL_ERROR_CHECK();
+
+ return (jint)retVal;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_VSgetattr
+(JNIEnv *env, jclass clss, jlong id, jint field_index, jint attr_index, jbyteArray values)
+{
+ intn rval;
+ jbyte *arr;
+ jboolean bb;
+
+ if (values == NULL) {
+ h4nullArgument(env, "VSgetattr: values is NULL");
+ } /* end if */
+ else {
+ arr = ENVPTR->GetByteArrayElements(ENVPAR values,&bb);
+
+ if (arr == NULL) {
+ h4JNIFatalError(env, "VSgetattr: values not pinned");
+ } /* end if */
+ else {
+ rval = VSgetattr((int32) id, (int32)field_index,
+ (int32) attr_index, (VOIDP) arr);
+
+ if (rval == FAIL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR values,arr,JNI_ABORT);
+ CALL_ERROR_CHECK();
+ }
+ else {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR values,arr,0);
+ }
+ } /* end else */
+ } /* end else */
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_VSisattr
+(JNIEnv *env, jclass clss, jlong id)
+{
+ intn rval;
+
+ rval = VSisattr((int32) id);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_VSnattrs
+(JNIEnv *env, jclass clss, jlong id)
+{
+ intn retVal;
+
+ retVal = VSnattrs((int32)id);
+ if (retVal <= 0)
+ CALL_ERROR_CHECK();
+
+ return (jint)retVal;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_VSsetattr__JILjava_lang_String_2JILjava_lang_String_2
+(JNIEnv *env, jclass clss, jlong id, jint index, jstring attr_name, jlong data_type, jint count, jstring values)
+{
+ intn rval;
+ const char *str;
+ const char *val;
+
+ PIN_JAVA_STRING_TWO(attr_name, str, values, val);
+
+ if (str != NULL && val != NULL) {
+ rval = VSsetattr((int32) id, (int32) index, (char *)str,
+ (int32) data_type, (int32) count, (VOIDP) val);
+
+ UNPIN_JAVA_STRING_TWO(attr_name, str, values, val);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+ }
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_VSsetattr__JILjava_lang_String_2JI_3B
+(JNIEnv *env, jclass clss, jlong id, jint index, jstring attr_name, jlong data_type, jint count, jbyteArray values)
+{
+ intn rval;
+ jbyte *arr;
+ const char *str;
+ jboolean bb;
+
+ if (values == NULL) {
+ h4nullArgument(env, "VSsetattr: values is NULL");
+ } /* end if */
+ else {
+ arr = ENVPTR->GetByteArrayElements(ENVPAR values,&bb);
+
+ if (arr == NULL) {
+ h4JNIFatalError(env, "VSsetattr: values not pinned");
+ } /* end if */
+ else {
+ PIN_JAVA_STRING(attr_name, str);
+
+ if (str != NULL) {
+ rval = VSsetattr((int32) id, (int32) index, (char *)str,
+ (int32) data_type, (int32) count, (VOIDP) arr);
+
+ UNPIN_JAVA_STRING(attr_name, str);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+ }
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR values,arr,JNI_ABORT);
+ } /* end else */
+ } /* end else */
+
+ return JNI_TRUE;
+}
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/java/src/jni/hdfvfImp.c b/java/src/jni/hdfvfImp.c
new file mode 100644
index 0000000..b6907f3
--- /dev/null
+++ b/java/src/jni/hdfvfImp.c
@@ -0,0 +1,115 @@
+
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+ /*
+ * This code is the C-interface called by Java programs to access the
+ * HDF 4 library.
+ *
+ * Each routine wraps a single HDF entry point, generally with the
+ * analogous arguments and return codes.
+ *
+ */
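+/*
+ * A minimal, hedged sketch of the Java-side use of these field-query
+ * wrappers (types follow the JNI signatures below; illustrative only):
+ *
+ *   // int nfields = HDFLibrary.VFnfields(vdata_id);
+ *   // for (int i = 0; i < nfields; i++) {
+ *   //     String fname = HDFLibrary.VFfieldname(vdata_id, i);
+ *   //     int fsize    = HDFLibrary.VFfieldisize(vdata_id, i);
+ *   // }
+ */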
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+#include "hdf.h"
+#include "jni.h"
+#include "h4jni.h"
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_VFfieldesize
+(JNIEnv *env, jclass clss, jlong vdata_id, jint field_index)
+{
+ int32 retVal;
+
+ retVal = VFfieldesize((int32)vdata_id, (int32)field_index);
+ if (retVal == FAIL)
+ CALL_ERROR_CHECK();
+
+ return (jint)retVal;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_VFfieldisize
+(JNIEnv *env, jclass clss, jlong vdata_id, jint field_index)
+{
+ int32 retVal;
+
+ retVal = VFfieldisize((int32)vdata_id, (int32)field_index);
+ if (retVal == FAIL)
+ CALL_ERROR_CHECK();
+
+ return (jint)retVal;
+}
+
+JNIEXPORT jstring JNICALL
+Java_hdf_hdflib_HDFLibrary_VFfieldname
+(JNIEnv *env, jclass clss, jlong vdata_id, jint field_index)
+{
+ jstring rstring;
+ char * str;
+
+ str = VFfieldname((int32) vdata_id, (int32) field_index);
+
+ /* check for error: VFfieldname returns NULL on failure */
+ if (str == NULL) {
+ CALL_ERROR_CHECK();
+ return NULL;
+ }
+
+ /* convert it to java string */
+ rstring = ENVPTR->NewStringUTF(ENVPAR str);
+
+ return rstring;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_VFfieldorder
+(JNIEnv *env, jclass clss, jlong vdata_id, jint field_index)
+{
+ int32 retVal;
+
+ retVal = VFfieldorder((int32)vdata_id, (int32)field_index);
+ if (retVal == FAIL)
+ CALL_ERROR_CHECK();
+
+ return (jint)retVal;
+}
+
+JNIEXPORT jlong JNICALL
+Java_hdf_hdflib_HDFLibrary_VFfieldtype
+(JNIEnv *env, jclass clss, jlong vdata_id, jint field_index)
+{
+ int32 retVal;
+
+ retVal = VFfieldtype((int32)vdata_id, (int32)field_index);
+ if (retVal == FAIL)
+ CALL_ERROR_CHECK();
+
+ return (jlong)retVal;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_VFnfields
+(JNIEnv *env, jclass clss, jlong key)
+{
+ int32 retVal;
+
+ retVal = VFnfields((int32)key);
+ if (retVal == FAIL)
+ CALL_ERROR_CHECK();
+
+ return (jint)retVal;
+}
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/java/src/jni/hdfvgroupImp.c b/java/src/jni/hdfvgroupImp.c
new file mode 100644
index 0000000..884f446
--- /dev/null
+++ b/java/src/jni/hdfvgroupImp.c
@@ -0,0 +1,893 @@
+
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+/*
+ * This code is the C-interface called by Java programs to access the
+ * HDF 4 library.
+ *
+ * Each routine wraps a single HDF entry point, generally with the
+ * analogous arguments and return codes.
+ *
+ */
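+/*
+ * A minimal, hedged sketch of the Java-side calling sequence for the vgroup
+ * wrappers (types follow the JNI signatures below; error handling omitted):
+ *
+ *   // HDFLibrary.Vstart(file_id);
+ *   // long vgroup_id = HDFLibrary.Vattach(file_id, vgroup_ref, "r");
+ *   // String[] name = new String[1];
+ *   // HDFLibrary.Vgetname(vgroup_id, name);
+ *   // HDFLibrary.Vdetach(vgroup_id);
+ *   // HDFLibrary.Vend(file_id);
+ */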
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "hdf.h"
+#include "jni.h"
+#include "h4jni.h"
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_Vstart
+(JNIEnv *env, jclass clss, jlong fid)
+{
+ intn rval;
+
+ rval = Vstart((int32)fid);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ return JNI_TRUE;
+}
+
+
+JNIEXPORT jlong JNICALL
+Java_hdf_hdflib_HDFLibrary_Vattach
+(JNIEnv *env, jclass clss, jlong fid, jint vgroup_ref, jstring accessmode)
+{
+ int retVal;
+ const char *access;
+ HFILEID id = (HFILEID)fid;
+
+ PIN_JAVA_STRING(accessmode, access);
+
+ if (access != NULL) {
+ /* attach to the vgroup specified by vgroup_ref */
+ retVal = Vattach(id, (int32)vgroup_ref, access);
+
+ UNPIN_JAVA_STRING(accessmode, access);
+
+ if (retVal == FAIL)
+ CALL_ERROR_CHECK();
+ }
+
+ return (jlong)retVal;
+}
+
+JNIEXPORT void JNICALL
+Java_hdf_hdflib_HDFLibrary_Vdetach
+(JNIEnv *env, jclass clss, jlong vgroup_id)
+{
+ int32 rval;
+
+ rval = Vdetach((int32)vgroup_id);
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+}
+
+JNIEXPORT void JNICALL
+Java_hdf_hdflib_HDFLibrary_Vend
+(JNIEnv *env, jclass clss, jlong fid)
+{
+ HFILEID id = (HFILEID)fid;
+ Vend(id);
+}
+
+
+JNIEXPORT jlong JNICALL
+Java_hdf_hdflib_HDFLibrary_Vgetid
+(JNIEnv *env, jclass clss, jlong fid, jint vgroup_ref)
+{
+ int32 rval;
+
+ rval = Vgetid((int32)fid, (int32)vgroup_ref);
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ return (jlong)rval;
+}
+
+JNIEXPORT void JNICALL
+Java_hdf_hdflib_HDFLibrary_Vgetclass
+(JNIEnv *env, jclass clss, jlong vgroup_id, jobjectArray hdfclassname)
+{
+ char *className;
+ jstring rstring;
+ jclass jc;
+ jobject o;
+ jboolean bb;
+
+ if (hdfclassname == NULL) {
+ h4nullArgument(env, "Vgetclass: hdfclassname is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR hdfclassname) < 1) {
+ h4badArgument(env, "Vgetclass: output array hdfclassname < order 1");
+ } /* end else if */
+ else {
+ className = (char *)HDmalloc(H4_MAX_NC_CLASS+1);
+
+ if (className == NULL) {
+ h4outOfMemory(env, "Vgetclass");
+ } /* end if */
+ else {
+ /* get the class name of the vgroup */
+ if (Vgetclass((int32)vgroup_id, className) < 0) {
+ /* exception -- failed */
+ CALL_ERROR_CHECK();
+ } /* end if */
+ else {
+ className[H4_MAX_NC_CLASS] = '\0';
+
+ /* convert it to java string */
+ rstring = ENVPTR->NewStringUTF(ENVPAR className);
+
+ /* create a Java String object in the calling environment... */
+ jc = ENVPTR->FindClass(ENVPAR "java/lang/String");
+ if (jc == NULL) {
+ HDfree(className);
+ return ; /* exception is raised */
+ }
+
+ o = ENVPTR->GetObjectArrayElement(ENVPAR hdfclassname,0);
+ if (o == NULL) {
+ HDfree(className);
+ return ;
+ }
+ bb = ENVPTR->IsInstanceOf(ENVPAR o,jc);
+ if (bb == JNI_FALSE) {
+ HDfree(className);
+ return ;
+ }
+ ENVPTR->SetObjectArrayElement(ENVPAR hdfclassname,0,(jobject)rstring);
+ ENVPTR->DeleteLocalRef(ENVPAR o);
+ } /* end else */
+
+ HDfree(className);
+ } /* end else */
+ } /* end else */
+
+ return;
+}
+
+JNIEXPORT void JNICALL
+Java_hdf_hdflib_HDFLibrary_Vgetname
+(JNIEnv *env, jclass clss, jlong vgroup_id, jobjectArray hdfname)
+{
+ char *name;
+ jstring rstring;
+ jclass jc;
+ jobject o;
+ jboolean bb;
+
+ if (hdfname == NULL) {
+ h4nullArgument(env, "Vgetname: hdfname is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR hdfname) < 1) {
+ h4badArgument(env, "Vgetname: array hdfname < order 1");
+ } /* end else if */
+ else {
+ name = (char *)HDmalloc(H4_MAX_NC_NAME+1);
+ if (name == NULL) {
+ h4outOfMemory(env, "Vgetname");
+ } /* end if */
+ else {
+ if (Vgetname((int32)vgroup_id, name) == FAIL) {
+ CALL_ERROR_CHECK();
+ } /* end if */
+ else {
+ name[H4_MAX_NC_NAME] = '\0';
+
+ rstring = ENVPTR->NewStringUTF(ENVPAR name);
+
+ jc = ENVPTR->FindClass(ENVPAR "java/lang/String");
+ if (jc == NULL) {
+ HDfree(name);
+ return ; /* exception is raised */
+ }
+ o = ENVPTR->GetObjectArrayElement(ENVPAR hdfname,0);
+ if (o == NULL) {
+ HDfree(name);
+ return ;
+ }
+ bb = ENVPTR->IsInstanceOf(ENVPAR o,jc);
+ if (bb == JNI_FALSE) {
+ HDfree(name);
+ return ;
+ }
+ ENVPTR->SetObjectArrayElement(ENVPAR hdfname,0,(jobject)rstring);
+ ENVPTR->DeleteLocalRef(ENVPAR o);
+ } /* end else */
+
+ HDfree(name);
+ } /* end else */
+ } /* end else */
+
+ return;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_Visvg
+(JNIEnv *env, jclass clss, jlong vgroup_id, jint vgroup_ref)
+{
+ intn rval;
+
+ rval = Visvg((int32)vgroup_id, vgroup_ref);
+
+ if (rval == FALSE)
+ CALL_ERROR_CHECK();
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_Visvs
+(JNIEnv *env, jclass clss, jlong vgroup_id, jint vdata_ref)
+{
+ intn rval;
+
+ rval = Visvs((int32)vgroup_id, vdata_ref);
+
+ if (rval == FALSE)
+ CALL_ERROR_CHECK();
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_Vgettagrefs
+(JNIEnv *env, jclass clss, jlong vgroup_id, jintArray tags, jintArray refs, jint size)
+{
+ jint *tagVal;
+ jint *refVal;
+ int32 retVal;
+ int copyMode = JNI_ABORT;
+
+ jboolean iscopy;
+
+ if (tags == NULL) {
+ h4nullArgument(env, "Vgettagrefs: tags is NULL");
+ } /* end if */
+ else if (refs == NULL) {
+ h4nullArgument(env, "Vgettagrefs: refs is NULL");
+ } /* end else if */
+ else if (ENVPTR->GetArrayLength(ENVPAR tags) < size) {
+ h4badArgument(env, "Vgettagrefs: output array tags < order 'size'");
+ } /* end else if */
+ else if (ENVPTR->GetArrayLength(ENVPAR refs) < size) {
+ h4badArgument(env, "Vgettagrefs: output array refs < order 'size'");
+ } /* end else if */
+ else {
+ tagVal = ENVPTR->GetIntArrayElements(ENVPAR tags,&iscopy);
+
+ if (tagVal == NULL) {
+ h4JNIFatalError(env, "Vgettagrefs: tags not pinned");
+ } /* end if */
+ else {
+ refVal = ENVPTR->GetIntArrayElements(ENVPAR refs,&iscopy);
+
+ if (refVal == NULL) {
+ h4JNIFatalError(env, "Vgettagrefs: refs not pinned");
+ } /* end if */
+ else {
+ /* get the tag/ref pairs of the members of the vgroup */
+ retVal = Vgettagrefs((int32)vgroup_id, (int32 *)tagVal, (int32 *)refVal, size);
+
+ if (retVal == FAIL) {
+ CALL_ERROR_CHECK();
+ }
+ else {
+ copyMode = 0;
+ }
+
+ ENVPTR->ReleaseIntArrayElements(ENVPAR refs,refVal,copyMode);
+ } /* end else */
+
+ ENVPTR->ReleaseIntArrayElements(ENVPAR tags,tagVal,copyMode);
+ } /* end else */
+ } /* end else */
+
+ return (jint)retVal;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_Vgettagref
+(JNIEnv *env, jclass clss, jlong vgroup_id, jint index, jintArray tagref)
+{
+ intn retVal;
+ jint * theArgs;
+ jboolean bb;
+
+ if (tagref == NULL) {
+ h4nullArgument(env, "Vgettagref: tagref is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR tagref) < 2) {
+ h4badArgument(env, "Vgettagref: output array tagref < order 2");
+ } /* end else if */
+ else {
+ theArgs = ENVPTR->GetIntArrayElements(ENVPAR tagref,&bb);
+
+ if (theArgs == NULL) {
+ h4JNIFatalError(env, "Vgettagref: tagref not pinned");
+ } /* end if */
+ else {
+ /* get the tag/ref pair at the given index in the vgroup */
+ retVal = Vgettagref((int32)vgroup_id, index, (int32 *)&(theArgs[0]),
+ (int32 *)&(theArgs[1]));
+
+ if (retVal == FAIL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR tagref,theArgs,JNI_ABORT);
+ CALL_ERROR_CHECK();
+ }
+ else {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR tagref,theArgs,0);
+ }
+ } /* end else */
+ } /* end else */
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_Vntagrefs
+(JNIEnv *env, jclass clss, jlong vgroup_id)
+{
+ int32 rval;
+
+ rval = Vntagrefs((int32)vgroup_id);
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ return (jint)rval;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_Vinqtagref
+(JNIEnv *env, jclass clss, jlong vgroup_id, jint tag, jint ref)
+{
+ int32 rval;
+
+ rval = Vinqtagref((int32)vgroup_id, (int32)tag, (int32)ref);
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ return (jint)rval;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_Vlone
+(JNIEnv *env, jclass clss, jlong fid, jintArray ref_array, jint arraysize)
+{
+ int retVal;
+ jint * arr;
+ jboolean bb;
+
+ if (ref_array == NULL) {
+ h4nullArgument(env, "Vlone: ref_array is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR ref_array) < arraysize) {
+ h4badArgument(env, "Vlone: output array ref_array < order 'arraysize'");
+ } /* end else if */
+ else {
+ arr = ENVPTR->GetIntArrayElements(ENVPAR ref_array,&bb);
+
+ if (arr == NULL) {
+ h4JNIFatalError(env, "Vlone: ref_array not pinned");
+ } /* end if */
+ else {
+ /* get the reference numbers of the lone vgroups in the file */
+ retVal = Vlone((int32)fid, (int32 *)arr, arraysize);
+
+ if (retVal == FAIL)
+ CALL_ERROR_CHECK();
+
+ ENVPTR->ReleaseIntArrayElements(ENVPAR ref_array,arr, 0);
+ } /* end else */
+ } /* end else */
+
+ return retVal;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_Vaddtagref
+(JNIEnv *env, jclass clss, jlong vgroup_id, jint tag, jint ref)
+{
+ intn rval;
+
+ rval = Vaddtagref((int32) vgroup_id, (int32) tag, (int32) ref);
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ return (jint)rval;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_Vdeletetagref
+(JNIEnv *env, jclass clss, jlong vgroup_id, jint tag, jint ref)
+{
+ intn rval;
+
+ rval = Vdeletetagref((int32) vgroup_id, (int32) tag, (int32) ref);
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ return (jint)rval;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_Vclose_I
+(JNIEnv *env, jclass clss, jlong file_id)
+{
+ intn rval;
+
+ rval = Vclose((int32) file_id);
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_Vfind
+(JNIEnv *env, jclass clss, jlong file_id, jstring vgname)
+{
+ int32 rval;
+ const char * vgn;
+
+ PIN_JAVA_STRING(vgname, vgn);
+
+ if (vgn != NULL) {
+ rval = Vfind((int32)file_id, (char *)vgn);
+
+ UNPIN_JAVA_STRING(vgname, vgn);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+ }
+
+ return rval;
+}
+
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_Vfindclass
+(JNIEnv *env, jclass clss, jlong file_id, jstring vgclassname)
+{
+ int32 rval;
+ const char * vgcn;
+
+ PIN_JAVA_STRING(vgclassname, vgcn);
+
+ if (vgcn != NULL) {
+ rval = Vfindclass((int32)file_id, (char *)vgcn);
+
+ UNPIN_JAVA_STRING(vgclassname, vgcn);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+ }
+
+ return rval;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_Vflocate
+(JNIEnv *env, jclass clss, jint key, jstring field)
+{
+ int32 rval;
+ const char * fld;
+
+ PIN_JAVA_STRING(field, fld);
+
+ if (fld != NULL) {
+ rval = Vflocate((int32)key, (char *)fld);
+
+ UNPIN_JAVA_STRING(field, fld);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+ }
+
+ return rval;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_Vgetnext
+(JNIEnv *env, jclass clss, jint vkey, jint elem_ref)
+{
+ int32 rval;
+
+ rval = Vgetnext((int32) vkey, (int32) elem_ref);
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ return (jint)rval;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_Vinquire
+(JNIEnv *env, jclass clss, jlong vgroup_id, jintArray n_entries, jobjectArray vgroup_name)
+{
+ intn rval;
+ jclass jc;
+ jstring rstring;
+ char *name;
+ jint * theArg;
+ jobject o;
+ jboolean bb;
+
+ if (n_entries == NULL) {
+ h4nullArgument(env, "Vinquire: n_entries is NULL");
+ } /* end if */
+ else if (vgroup_name == NULL) {
+ h4nullArgument(env, "Vinquire: vgroup_name is NULL");
+ } /* end else if */
+ else if (ENVPTR->GetArrayLength(ENVPAR n_entries) < 1) {
+ h4badArgument(env, "Vinquire: output array n_entries < order 1");
+ } /* end else if */
+ else if (ENVPTR->GetArrayLength(ENVPAR vgroup_name) < 1) {
+ h4badArgument(env, "Vinquire: output array vgroup_name < order 1");
+ } /* end else if */
+ else {
+ name = (char *)HDmalloc(H4_MAX_NC_NAME+1);
+
+ if (name == NULL) {
+ h4outOfMemory(env, "Vinquire");
+ } /* end if */
+ else {
+ theArg = ENVPTR->GetIntArrayElements(ENVPAR n_entries,&bb);
+
+ if (theArg == NULL) {
+ h4JNIFatalError(env, "Vinquire: n_entries not pinned");
+ } /* end if */
+ else {
+ rval = Vinquire((int32) vgroup_id, (int32 *)&(theArg[0]), name);
+
+ name[H4_MAX_NC_NAME] = '\0';
+
+ if (rval == FAIL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR n_entries,theArg, JNI_ABORT);
+ CALL_ERROR_CHECK();
+ }
+ else {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR n_entries,theArg, 0);
+
+ jc = ENVPTR->FindClass(ENVPAR "java/lang/String");
+ if (jc == NULL) {
+ HDfree(name);
+ return JNI_FALSE;
+ }
+ o = ENVPTR->GetObjectArrayElement(ENVPAR vgroup_name,0);
+ if (o == NULL) {
+ HDfree(name);
+ return JNI_FALSE;
+ }
+ bb = ENVPTR->IsInstanceOf(ENVPAR o,jc);
+ if (bb == JNI_FALSE) {
+ HDfree(name);
+ return JNI_FALSE;
+ }
+ rstring = ENVPTR->NewStringUTF(ENVPAR name);
+ ENVPTR->SetObjectArrayElement(ENVPAR vgroup_name,0,(jobject)rstring);
+ ENVPTR->DeleteLocalRef(ENVPAR o);
+ } /* end else */
+ } /* end else */
+
+ HDfree(name);
+ } /* end else */
+ } /* end else */
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_Vinsert
+(JNIEnv *env, jclass clss, jlong vgroup_id, jint v_id)
+{
+ int32 rval;
+
+ rval = Vinsert((int32) vgroup_id, (int32) v_id);
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ return (jint)rval;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_Vnrefs
+(JNIEnv *env, jclass clss, jint vkey, jint tag)
+{
+ int32 rval;
+
+ rval = Vnrefs((int32) vkey, (int32) tag);
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ return (jint)rval;
+}
+
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_Vopen
+(JNIEnv *env, jclass clss, jstring filename, jint access, jshort ndds)
+{
+ intn rval;
+ const char * str;
+
+ PIN_JAVA_STRING(filename, str);
+
+ if (str != NULL) {
+ rval = Vopen((char *)str, (intn) access, (int16) ndds);
+
+ UNPIN_JAVA_STRING(filename, str);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+ }
+
+ return rval;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_Vsetclass
+(JNIEnv *env, jclass clss, jlong vgroup_id, jstring hdfclassname)
+{
+ intn rval;
+ const char * str;
+
+ PIN_JAVA_STRING(hdfclassname, str);
+
+ if (str != NULL) {
+ rval = Vsetclass((int32)vgroup_id, (char *)str);
+
+ UNPIN_JAVA_STRING(hdfclassname, str);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+ }
+
+ return JNI_TRUE;
+}
+
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_Vsetname
+(JNIEnv *env, jclass clss, jlong vgroup_id, jstring name)
+{
+ intn rval;
+ const char *str;
+
+ PIN_JAVA_STRING(name, str);
+
+ if (str != NULL) {
+ rval = Vsetname((int32)vgroup_id, (char *)str);
+
+ UNPIN_JAVA_STRING(name, str);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+ }
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_Vattrinfo
+(JNIEnv *env, jclass clss, jlong id, jint index, jobjectArray name, jintArray argv)
+{
+ int32 retVal;
+ jint *theArgs;
+ jboolean bb;
+ jclass Sjc;
+ jstring str;
+ jobject o;
+ char nam[256]; /* what is the correct constant??? */
+
+ if (name == NULL) {
+ h4nullArgument(env, "Vattrinfo: name is NULL");
+ } /* end if */
+ else if (argv == NULL) {
+ h4nullArgument(env, "Vattrinfo: argv is NULL");
+ } /* end else if */
+ else if (ENVPTR->GetArrayLength(ENVPAR name) < 1) {
+ h4badArgument(env, "Vattrinfo: output array name < order 1");
+ } /* end else if */
+ else if (ENVPTR->GetArrayLength(ENVPAR argv) < 5) {
+ h4badArgument(env, "Vattrinfo: output array argv < order 5");
+ } /* end else if */
+ else {
+ theArgs = ENVPTR->GetIntArrayElements(ENVPAR argv,&bb);
+
+ if (theArgs == NULL) {
+ h4JNIFatalError(env, "Vattrinfo: argv not pinned");
+ } /* end if */
+ else {
+ retVal = Vattrinfo2((int32)id, (int32)index, nam,
+ (int32 *)&(theArgs[0]), (int32 *)&(theArgs[1]),
+ (int32 *)&(theArgs[2]), (int32 *)&(theArgs[3]), (uint16 *)&(theArgs[4]));
+
+ nam[255] = '\0';
+
+ if (retVal == FAIL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR argv,theArgs,JNI_ABORT);
+ CALL_ERROR_CHECK();
+ }
+ else {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR argv,theArgs,0);
+
+ str = ENVPTR->NewStringUTF(ENVPAR nam);
+ o = ENVPTR->GetObjectArrayElement(ENVPAR name,0);
+ if (o == NULL) {
+ return JNI_FALSE;
+ }
+ Sjc = ENVPTR->FindClass(ENVPAR "java/lang/String");
+ if (Sjc == NULL) {
+ return JNI_FALSE;
+ }
+ bb = ENVPTR->IsInstanceOf(ENVPAR o,Sjc);
+ if (bb == JNI_FALSE) {
+ return JNI_FALSE;
+ }
+ ENVPTR->SetObjectArrayElement(ENVPAR name,0,(jobject)str);
+ ENVPTR->DeleteLocalRef(ENVPAR o);
+ }
+ } /* end else */
+ } /* end else */
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_Vfindattr
+(JNIEnv *env, jclass clss, jlong id, jstring name)
+{
+ int32 retVal;
+ const char *cname;
+
+ PIN_JAVA_STRING(name, cname);
+
+ if (cname != NULL) {
+ retVal = Vfindattr((int32)id, cname);
+
+ UNPIN_JAVA_STRING(name, cname);
+
+ if (retVal == FAIL)
+ CALL_ERROR_CHECK();
+ }
+
+ return retVal;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_Vgetattr
+(JNIEnv *env, jclass clss, jlong gr_id, jint attr_index, jbyteArray values)
+{
+ intn rval;
+ jbyte *arr;
+ jboolean bb;
+
+ if (values == NULL) {
+ h4nullArgument(env, "Vgetattr: values is NULL");
+ } /* end if */
+ else {
+ arr = ENVPTR->GetByteArrayElements(ENVPAR values,&bb);
+
+ if (arr == NULL) {
+ h4JNIFatalError(env, "Vgetattr: values not pinned");
+ } /* end if */
+ else {
+ rval = Vgetattr2((int32) gr_id, (int32) attr_index, (VOIDP) arr);
+
+ if (rval == FAIL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR values,arr,JNI_ABORT);
+ CALL_ERROR_CHECK();
+ } /* end if */
+ else {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR values,arr,0);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_Vgetversion
+(JNIEnv *env, jclass clss, jlong id)
+{
+ int32 rval;
+
+ rval = Vgetversion((int32) id);
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ return (jint)rval;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_Vnattrs
+(JNIEnv *env, jclass clss, jlong id)
+{
+ int32 rval;
+
+ rval = Vnattrs2((int32) id);
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ return (jint)rval;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_Vsetattr__JLjava_lang_String_2JILjava_lang_String_2
+(JNIEnv *env, jclass clss, jlong gr_id, jstring attr_name, jlong data_type, jint count, jstring values)
+{
+ intn rval;
+ const char *str;
+ const char *val;
+
+ PIN_JAVA_STRING_TWO(attr_name, str, values, val);
+
+ if (str != NULL && val != NULL) {
+ rval = Vsetattr((int32) gr_id, (char *)str, (int32) data_type,
+ (int32) count, (VOIDP) val);
+
+ UNPIN_JAVA_STRING_TWO(attr_name, str, values, val);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+ }
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_Vsetattr__JLjava_lang_String_2JI_3B
+(JNIEnv *env, jclass clss, jlong id, jstring attr_name, jlong data_type, jint count, jbyteArray values)
+{
+ intn rval;
+ jbyte *arr;
+ const char *str;
+ jboolean bb;
+
+ if (values == NULL) {
+ h4nullArgument(env, "Vsetattr: values is null");
+ } /* end if */
+ else {
+ arr = ENVPTR->GetByteArrayElements(ENVPAR values,&bb);
+
+ if (arr == NULL) {
+ h4JNIFatalError(env, "Vsetattr: values not pinned");
+ } /* end if */
+ else {
+ PIN_JAVA_STRING(attr_name, str);
+
+ if (str != NULL) {
+ rval = Vsetattr((int32) id, (char *)str, (int32) data_type,
+ (int32) count, (VOIDP) arr);
+
+ UNPIN_JAVA_STRING(attr_name, str);
+
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+ }
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR values,arr,JNI_ABORT);
+ } /* end else */
+ } /* end else */
+
+ return JNI_TRUE;
+}
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/java/src/jni/hdfvhImp.c b/java/src/jni/hdfvhImp.c
new file mode 100644
index 0000000..45915d9
--- /dev/null
+++ b/java/src/jni/hdfvhImp.c
@@ -0,0 +1,151 @@
+
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+/*
+ * This code is the C-interface called by Java programs to access the
+ * HDF 4 library.
+ *
+ * Each routine wraps a single HDF entry point, generally with the
+ * analogous arguments and return codes.
+ *
+ */
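+/*
+ * A minimal, hedged sketch of the Java-side use of the high-level vdata
+ * wrappers (parameter types follow the JNI signatures below; the buffer
+ * contents and data type value are illustrative):
+ *
+ *   // byte[] buf = ...;  // n_records packed records of one field
+ *   // int ref = HDFLibrary.VHstoredata(file_id, "field1", buf,
+ *   //                                  n_records, data_type,
+ *   //                                  "MyVdata", "MyClass");
+ */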
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+#include "hdf.h"
+#include "h4jni.h"
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_VHmakegroup
+(JNIEnv *env, jclass oclass, jlong file_id, jintArray tag_array, jintArray ref_array,
+ jint n_objects, jstring vgroup_name, jstring vgroup_class)
+{
+ int32 rval;
+ jint *tags;
+ jint *refs;
+ const char *vname;
+ const char *vcls;
+ jboolean bb;
+
+ PIN_JAVA_STRING_TWO(vgroup_name, vname, vgroup_class, vcls);
+ if (vname != NULL && vcls != NULL) {
+ if (tag_array == NULL) {
+ h4nullArgument(env, "VHmakegroup: tag_array is NULL");
+ } /* end if */
+ else if (ref_array == NULL) {
+ h4nullArgument(env, "VHmakegroup: ref_array is NULL");
+ } /* end if */
+ else {
+ tags = ENVPTR->GetIntArrayElements(ENVPAR tag_array, &bb);
+ if (tags == NULL) {
+ h4JNIFatalError(env, "VHmakegroup: tag_array not pinned");
+ } /* end if */
+ else {
+ refs = ENVPTR->GetIntArrayElements(ENVPAR ref_array, &bb);
+ if (refs == NULL) {
+ h4JNIFatalError(env, "VHmakegroup: ref_array not pinned");
+ } /* end if */
+ else {
+ rval = VHmakegroup((int32)file_id, (int32 *)tags, (int32 *)refs,
+ (int32)n_objects, (char *)vname, (char *)vcls);
+ if (rval < 0)
+ CALL_ERROR_CHECK();
+
+ ENVPTR->ReleaseIntArrayElements(ENVPAR ref_array, refs, JNI_ABORT);
+ }
+ ENVPTR->ReleaseIntArrayElements(ENVPAR tag_array, tags, JNI_ABORT);
+ }
+ }
+ UNPIN_JAVA_STRING_TWO(vgroup_name, vname, vgroup_class, vcls);
+ }
+ return rval;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_VHstoredata
+(JNIEnv *env, jclass oclass, jlong file_id, jstring fieldname, jbyteArray buf, jint n_records,
+ jint data_type, jstring vdata_name, jstring vdata_class)
+{
+ int32 rval;
+ jbyte *buffer;
+ const char *fldname;
+ const char *vname;
+ const char *vcls;
+ jboolean bb;
+
+ PIN_JAVA_STRING_THREE(fieldname, fldname, vdata_name, vname, vdata_class, vcls);
+ if (fldname != NULL && vname != NULL && vcls != NULL) {
+ if (buf == NULL) {
+ h4nullArgument(env, "VHstoredata: buf is NULL");
+ } /* end if */
+ else {
+ buffer = ENVPTR->GetByteArrayElements(ENVPAR buf, &bb);
+ if (buffer == NULL) {
+ h4JNIFatalError(env, "VHstoredatam: buf not pinned");
+ } /* end if */
+ else {
+ rval = VHstoredata((int32) file_id, (char *)fldname,
+ (uint8 *)buffer, (int32)n_records, (int32)data_type,
+ (char *)vname, (char *)vcls);
+ if (rval < 0)
+ CALL_ERROR_CHECK();
+ ENVPTR->ReleaseByteArrayElements(ENVPAR buf, buffer, JNI_ABORT);
+ }
+ }
+ UNPIN_JAVA_STRING_THREE(fieldname, fldname, vdata_name, vname, vdata_class, vcls);
+ }
+ return rval;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_VHstoredatam
+(JNIEnv *env, jclass oclass, jlong file_id, jstring fieldname, jbyteArray buf, jint n_records,
+ jint data_type, jstring vdata_name, jstring vdata_class, jint order)
+{
+ int32 rval = FAIL;
+ jbyte *buffer;
+ const char *fldname;
+ const char *vname;
+ const char *vcls;
+ jboolean bb;
+
+ PIN_JAVA_STRING_THREE(fieldname, fldname, vdata_name, vname, vdata_class, vcls);
+ if (fldname != NULL && vname != NULL && vcls != NULL) {
+ if (buf == NULL) {
+ h4nullArgument(env, "VHstoredatam: buf is NULL");
+ } /* end if */
+ else {
+ buffer = ENVPTR->GetByteArrayElements(ENVPAR buf, &bb);
+ if (buffer == NULL) {
+ h4JNIFatalError(env, "VHstoredatam: buf not pinned");
+ } /* end if */
+ else {
+ rval = VHstoredatam((int32)file_id, (char *)fldname,
+ (uint8 *)buffer, (int32)n_records, (int32)data_type,
+ (char *)vname, (char *)vcls, (int32)order);
+ if (rval < 0)
+ CALL_ERROR_CHECK();
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR buf, buffer, JNI_ABORT);
+ }
+ }
+ UNPIN_JAVA_STRING_THREE(fieldname, fldname, vdata_name, vname, vdata_class, vcls);
+ }
+ return (jint)rval;
+}
+
+#ifdef __cplusplus
+}
+#endif
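
On the Java side these entry points surface as static methods on hdf.hdflib.HDFLibrary, with the JNI jlong/jintArray/jstring parameters mapping to long, int[] and String, and failures generally reported through HDFException (as the parameter tests later in this patch expect). The sketch below shows one plausible caller; the Java signatures are inferred from the JNI functions above, and the DFTAG_VH constant is assumed to be exposed by HDFConstants, so treat it as illustrative rather than as the library's documented API.

    import hdf.hdflib.HDFConstants;
    import hdf.hdflib.HDFLibrary;

    public class VHDemo {
        public static void main(String[] args) throws Exception {
            long fid = HDFLibrary.Hopen("demo.hdf", HDFConstants.DFACC_CREATE);

            // Store the bytes as one-field DFNT_CHAR8 records; the return
            // value is the reference number of the new vdata.
            byte[] payload = "hello".getBytes();
            int vdataRef = HDFLibrary.VHstoredata(fid, "field1", payload,
                    payload.length, HDFConstants.DFNT_CHAR8,
                    "demo_vdata", "demo_class");

            // Collect the new vdata (tag DFTAG_VH, assumed to be defined in
            // HDFConstants) into a one-member vgroup.
            int[] tags = { HDFConstants.DFTAG_VH };
            int[] refs = { vdataRef };
            HDFLibrary.VHmakegroup(fid, tags, refs, 1,
                    "demo_vgroup", "demo_vgroup_class");

            HDFLibrary.Hclose(fid);
        }
    }
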
diff --git a/java/src/jni/hdfvqImp.c b/java/src/jni/hdfvqImp.c
new file mode 100644
index 0000000..548070f
--- /dev/null
+++ b/java/src/jni/hdfvqImp.c
@@ -0,0 +1,58 @@
+
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+/*
+ * This code is the C-interface called by Java programs to access the
+ * HDF 4 library.
+ *
+ * Each routine wraps a single HDF entry point, generally with the
+ * analogous arguments and return codes.
+ *
+ */
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "hdf.h"
+#include "jni.h"
+#include "h4jni.h"
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_VQueryref
+(JNIEnv *env, jclass clss, jlong vkey)
+{
+ int32 rval;
+
+ rval = VQueryref((int32) vkey);
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ return (jint)rval;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_VQuerytag
+(JNIEnv *env, jclass clss, jlong vkey)
+{
+ int32 rval;
+
+ rval = VQuerytag((int32) vkey);
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ return (jint)rval;
+}
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/java/src/jni/hdfvsqImp.c b/java/src/jni/hdfvsqImp.c
new file mode 100644
index 0000000..487449f
--- /dev/null
+++ b/java/src/jni/hdfvsqImp.c
@@ -0,0 +1,282 @@
+
+/****************************************************************************
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Java Products. The full HDF Java copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ ****************************************************************************/
+
+/*
+ * This code is the C-interface called by Java programs to access the
+ * HDF 4 library.
+ *
+ * Each routine wraps a single HDF entry point, generally with the
+ * analogous arguments and return codes.
+ *
+ */
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+#include "hdf.h"
+#include "jni.h"
+#include "h4jni.h"
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_VSQuerycount
+(JNIEnv *env, jclass clss, jlong vdata_id, jintArray n_records)
+{
+ intn rval;
+ jint * theArg;
+ jboolean bb;
+
+ if (n_records == NULL) {
+ h4nullArgument(env, "VSQuerycount: n_records is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR n_records) < 1) {
+ h4badArgument(env, "VSQuerycount: output array n_records < order 1");
+ } /* end else if */
+ else {
+ theArg = ENVPTR->GetIntArrayElements(ENVPAR n_records,&bb);
+
+ if (theArg == NULL) {
+ h4JNIFatalError(env, "VSQuerycount: n_records not pinned");
+ } /* end if */
+ else {
+ rval = VSQuerycount((int32) vdata_id, (int32 *)&(theArg[0]));
+
+ if (rval == FAIL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR n_records,theArg,JNI_ABORT);
+ CALL_ERROR_CHECK();
+ }
+ else {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR n_records,theArg,0);
+ }
+ } /* end else */
+ } /* end else */
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_VSQueryfields
+(JNIEnv *env, jclass clss, jlong vdata_id, jobjectArray fields)
+{
+ intn rval;
+ char flds[4096];
+ jstring rstring;
+ jclass jc;
+ jobject o;
+ jboolean bb;
+
+ if (fields == NULL) {
+ h4nullArgument(env, "VSQueryfields: fields is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR fields) < 1) {
+ h4badArgument(env, "VSQueryfields: output array fields < order 1");
+ } /* end else if */
+ else {
+ rval = VSQueryfields((int32) vdata_id, (char *)flds);
+ flds[4095] = '\0';
+
+ if (rval == FAIL) {
+ CALL_ERROR_CHECK();
+ }
+ else {
+ /* convert it to java string */
+ rstring = ENVPTR->NewStringUTF(ENVPAR flds);
+
+ /* create a Java String object in the calling environment... */
+ jc = ENVPTR->FindClass(ENVPAR "java/lang/String");
+ if (jc == NULL) {
+ return JNI_FALSE; /* exception is raised */
+ }
+ o = ENVPTR->GetObjectArrayElement(ENVPAR fields,0);
+ if (o == NULL) {
+ return JNI_FALSE;
+ }
+ bb = ENVPTR->IsInstanceOf(ENVPAR o,jc);
+ if (bb == JNI_FALSE) {
+ return JNI_FALSE;
+ }
+ ENVPTR->SetObjectArrayElement(ENVPAR fields,0,(jobject)rstring);
+ ENVPTR->DeleteLocalRef(ENVPAR o);
+
+ return JNI_TRUE;
+ } /* end else */
+ } /* end else */
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_VSQueryinterlace
+(JNIEnv *env, jclass clss, jlong vdata_id, jintArray interlace)
+{
+ intn rval;
+ jint * theArg;
+ jboolean bb;
+
+ if (interlace == NULL) {
+ h4nullArgument(env, "VSQueryinterlace: interlace is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR interlace) < 1) {
+ h4badArgument(env, "VSQueryinterlace: output array interlace < order 1");
+ } /* end else if */
+ else {
+ theArg = ENVPTR->GetIntArrayElements(ENVPAR interlace,&bb);
+
+ if (theArg == NULL) {
+ h4JNIFatalError(env, "VSQueryinterlace: interlace not pinned");
+ } /* end if */
+ else {
+ rval = VSQueryinterlace((int32) vdata_id, (int32 *)&(theArg[0]));
+
+ if (rval == FAIL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR interlace,theArg,JNI_ABORT);
+ CALL_ERROR_CHECK();
+ }
+ else {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR interlace,theArg,0);
+ }
+ } /* end else */
+ } /* end else */
+
+ return JNI_TRUE;
+}
+
+
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_VSQueryname
+(JNIEnv *env, jclass clss, jlong vdata_id, jobjectArray vdata_name)
+{
+ intn rval;
+ char *nm;
+ jstring rstring;
+ jclass jc;
+ jobject o;
+ jboolean bb;
+
+ if (vdata_name == NULL) {
+ h4nullArgument(env, "VSQueryname: vdata_name is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR vdata_name) < 1) {
+ h4badArgument(env, "VSQueryname: output array vdata_name < order 1");
+ } /* end else if */
+ else {
+ nm = (char *)HDmalloc(VSNAMELENMAX+1);
+
+ if (nm == NULL) {
+ h4outOfMemory(env, "VSQueryname");
+ } /* end if */
+ else {
+ rval = VSQueryname((int32) vdata_id, (char *)nm);
+ nm[VSNAMELENMAX] = '\0';
+
+ if (rval == FAIL) {
+ CALL_ERROR_CHECK();
+ } /* end if */
+ else {
+ /* convert it to java string */
+ rstring = ENVPTR->NewStringUTF(ENVPAR nm);
+
+ /* create a Java String object in the calling environment... */
+ jc = ENVPTR->FindClass(ENVPAR "java/lang/String");
+ if (jc == NULL) {
+ HDfree(nm);
+ return JNI_FALSE; /* exception is raised */
+ }
+ o = ENVPTR->GetObjectArrayElement(ENVPAR vdata_name,0);
+ if (o == NULL) {
+ HDfree(nm);
+ return JNI_FALSE;
+ }
+ bb = ENVPTR->IsInstanceOf(ENVPAR o,jc);
+ if (bb == JNI_FALSE) {
+ HDfree(nm);
+ return JNI_FALSE;
+ }
+ ENVPTR->SetObjectArrayElement(ENVPAR vdata_name,0,(jobject)rstring);
+ ENVPTR->DeleteLocalRef(ENVPAR o);
+ } /* end else */
+
+ HDfree(nm);
+ } /* end else */
+ } /* end else */
+
+ return JNI_TRUE;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_VSQueryref
+(JNIEnv *env, jclass clss, jlong vdata_id)
+{
+ int32 rval;
+
+ rval = VSQueryref((int32)vdata_id);
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ return (jint)rval;
+}
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdflib_HDFLibrary_VSQuerytag
+(JNIEnv *env, jclass clss, jlong vdata_id)
+{
+ int32 rval;
+
+ rval = VSQuerytag((int32)vdata_id);
+ if (rval == FAIL)
+ CALL_ERROR_CHECK();
+
+ return (jint)rval;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdflib_HDFLibrary_VSQueryvsize
+(JNIEnv *env, jclass clss, jlong vdata_id, jintArray vdata_size)
+{
+ intn rval;
+ jint * theArg;
+ jboolean bb;
+
+ if (vdata_size == NULL) {
+ h4nullArgument(env, "VSQuerysize: vdata_size is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR vdata_size) < 1) {
+ h4badArgument(env, "VSQueryvsize: output array vdata_size < order 1");
+ } /* end else if */
+ else {
+ theArg = ENVPTR->GetIntArrayElements(ENVPAR vdata_size,&bb);
+
+ if (theArg == NULL) {
+ h4JNIFatalError(env, "VSQuerysize: vdata_size not pinned");
+ } /* end if */
+ else {
+ rval = VSQueryvsize((int32) vdata_id, (int32 *)&(theArg[0]));
+
+ if (rval == FAIL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR vdata_size,theArg,JNI_ABORT);
+ CALL_ERROR_CHECK();
+ }
+ else {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR vdata_size,theArg,0);
+ }
+ } /* end else */
+ } /* end else */
+
+ return JNI_TRUE;
+}
+
+#ifdef __cplusplus
+}
+#endif
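
All of the VSQuery* wrappers above follow the same convention for scalar results: the caller passes a one-element array (int[] or String[]), which is why each routine rejects a null or too-short array before pinning it. A rough Java-side sketch, with the vdata handle assumed to come from HDFLibrary.VSattach and the method signatures inferred from the JNI code above rather than quoted from HDFLibrary.java:

    import hdf.hdflib.HDFLibrary;

    public class VSQueryDemo {
        // vdataId is assumed to be a handle previously returned by VSattach.
        static void describe(long vdataId) throws Exception {
            int[] nRecords  = new int[1];
            int[] interlace = new int[1];
            int[] vsize     = new int[1];
            String[] fields = new String[1];
            String[] name   = new String[1];

            HDFLibrary.VSQuerycount(vdataId, nRecords);
            HDFLibrary.VSQueryinterlace(vdataId, interlace);
            HDFLibrary.VSQueryvsize(vdataId, vsize);
            HDFLibrary.VSQueryfields(vdataId, fields);
            HDFLibrary.VSQueryname(vdataId, name);

            System.out.println(name[0] + ": " + nRecords[0]
                    + " records of " + vsize[0] + " bytes, fields = " + fields[0]);
        }
    }
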
diff --git a/java/test/CMakeLists.txt b/java/test/CMakeLists.txt
new file mode 100644
index 0000000..2123427
--- /dev/null
+++ b/java/test/CMakeLists.txt
@@ -0,0 +1,100 @@
+cmake_minimum_required (VERSION 3.1.0)
+PROJECT (HDF4_JAVA_TEST Java)
+
+set (CMAKE_VERBOSE_MAKEFILE 1)
+
+INCLUDE_DIRECTORIES (
+ ${HDF4_JAVA_JNI_BINARY_DIR}
+ ${HDF4_JAVA_HDF_LIB_DIR}
+)
+
+set (HDF4_JAVA_TEST_SRCS
+ TestH4.java
+ TestH4ANparams.java
+ TestH4DFparams.java
+ TestH4DFPparams.java
+ TestH4DFRparams.java
+ TestH4GRparams.java
+ TestH4HCparams.java
+ TestH4SDparams.java
+ TestH4Vparams.java
+ TestH4VSparams.java
+ TestAll.java
+)
+
+file (WRITE ${PROJECT_BINARY_DIR}/Manifest.txt
+"Main-Class: test.TestAll
+"
+)
+
+set (CMAKE_JAVA_INCLUDE_PATH "${HDF4_JAVA_LIB_DIR}/junit.jar;${HDF4_JAVA_LIB_DIR}/hamcrest-core.jar;${HDF4_JAVA_JARS};${HDF4_JAVA_LOGGING_JAR};${HDF4_JAVA_LOGGING_SIMPLE_JAR}")
+
+add_jar (${HDF4_JAVA_TEST_LIB_TARGET} MANIFEST ${PROJECT_BINARY_DIR}/Manifest.txt ${HDF4_JAVA_TEST_SRCS})
+
+get_target_property (${HDF4_JAVA_TEST_LIB_TARGET}_JAR_FILE ${HDF4_JAVA_TEST_LIB_TARGET} JAR_FILE)
+#install_jar (${HDF4_JAVA_TEST_LIB_TARGET} ${HJAVA_INSTALL_DATA_DIR}/tests tests)
+#get_target_property (${HDF4_JAVA_TEST_LIB_TARGET}_CLASSPATH ${HDF4_JAVA_TEST_LIB_TARGET} CLASSDIR)
+
+add_dependencies (${HDF4_JAVA_TEST_LIB_TARGET} ${HDF4_JAVA_HDF_LIB_TARGET})
+set_target_properties (${HDF4_JAVA_TEST_LIB_TARGET} PROPERTIES FOLDER test/java)
+
+set (HDF_JAVA_TEST_FILES
+ JUnit-interface.txt
+ JUnit-interface.ert
+)
+
+foreach (h4_file ${HDF_JAVA_TEST_FILES})
+ set (dest "${PROJECT_BINARY_DIR}/${h4_file}")
+ #message (STATUS " Copying ${h4_file}")
+ add_custom_command (
+ TARGET ${HDF4_JAVA_TEST_LIB_TARGET}
+ POST_BUILD
+ COMMAND ${CMAKE_COMMAND}
+ ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/${h4_file} ${dest}
+ )
+endforeach (h4_file ${HDF_JAVA_TEST_FILES})
+
+if (WIN32)
+ set (CMAKE_JAVA_INCLUDE_FLAG_SEP ";")
+else (WIN32)
+ set (CMAKE_JAVA_INCLUDE_FLAG_SEP ":")
+endif (WIN32)
+
+set (CMAKE_JAVA_CLASSPATH ".")
+foreach (CMAKE_INCLUDE_PATH ${CMAKE_JAVA_INCLUDE_PATH})
+ set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${CMAKE_INCLUDE_PATH}")
+endforeach (CMAKE_INCLUDE_PATH)
+set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${${HDF4_JAVA_TEST_LIB_TARGET}_JAR_FILE}")
+set (testfilter "OK (598 tests)")
+
+if (CMAKE_BUILD_TYPE MATCHES Debug)
+ set (CMD_ARGS "-Dhdf.hdflib.HDFLibrary.loadLibraryName=hdf_java_debug;")
+endif(CMAKE_BUILD_TYPE MATCHES Debug)
+
+add_test (
+ NAME JUnit-interface-clearall-objects
+ COMMAND ${CMAKE_COMMAND}
+ -E remove
+ JUnit-interface.out
+ JUnit-interface.out.err
+)
+
+add_test (
+ NAME JUnit-interface
+ COMMAND "${CMAKE_COMMAND}"
+ -D "TEST_TESTER=${CMAKE_Java_RUNTIME};${CMAKE_Java_RUNTIME_FLAGS}"
+ -D "TEST_CLASSPATH:STRING=${CMAKE_JAVA_CLASSPATH}"
+ -D "TEST_ARGS:STRING=${CMD_ARGS}-ea;org.junit.runner.JUnitCore"
+ -D "TEST_PROGRAM=test.TestAll"
+ -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_OUTPUT_DIRECTORY}"
+ -D "TEST_FOLDER=${HDF4_BINARY_DIR}/java/test"
+ -D "TEST_OUTPUT=JUnit-interface.out"
+# -D "TEST_LOG_LEVEL=trace"
+ -D "TEST_EXPECT=0"
+ -D "SKIP_APPEND=1"
+ -D "TEST_MASK_ERROR=TRUE"
+ -D "TEST_FILTER:STRING=${testfilter}"
+ -D "TEST_REFERENCE=JUnit-interface.txt"
+ -P "${HDF_RESOURCES_DIR}/jrunTest.cmake"
+)
+set_tests_properties (JUnit-interface PROPERTIES DEPENDS "JUnit-interface-clearall-objects")
diff --git a/java/test/JUnit-interface.ert b/java/test/JUnit-interface.ert
new file mode 100644
index 0000000..277562f
--- /dev/null
+++ b/java/test/JUnit-interface.ert
@@ -0,0 +1,2 @@
+[main] INFO hdf.hdflib.HDFLibrary - HDF4 library: hdf_java
+[main] INFO hdf.hdflib.HDFLibrary - successfully loaded from java.library.path
diff --git a/java/test/JUnit-interface.txt b/java/test/JUnit-interface.txt
new file mode 100644
index 0000000..dea1257
--- /dev/null
+++ b/java/test/JUnit-interface.txt
@@ -0,0 +1,367 @@
+JUnit version 4.11
+.testCreateCloseOpen
+.testDFKNTsize
+.testHgetlibversion
+.testJ2C
+.testHnumber
+.testHishdf
+.testHDgetNTdesc
+.testANendIllegalId
+.testANannlistNull
+.testANfileinfoIllegalId
+.testANwriteannIllegalId
+.testANid2tagrefNull
+.testANfileinfoNull
+.testANtagref2idIllegalId
+.testANget_tagrefArgument
+.testANendaccessIllegalId
+.testANget_tagrefIllegalId
+.testANtag2atypeIllegalId
+.testANwriteannNull
+.testANatype2tagIllegalId
+.testANcreatefIllegalId
+.testANcreateIllegalId
+.testANnumannIllegalId
+.testANid2tagrefIllegalId
+.testANstartIllegalId
+.testANfileinfoArgument
+.testANget_tagrefNull
+.testANannlistIllegalId
+.testANreadannIllegalId
+.testANselectIllegalId
+.testANid2tagrefArgument
+.testANannlenIllegalId
+.testDF24putimageIllegalArgument
+.testDF24putimageNullImage
+.testDF24getdimsIllegalArgument
+.testDF24getdimsNullArguments
+.testDF24readrefIllegalRef
+.testDF24getimageNullImageData
+.testDF24putimageNullFilename
+.testDF24getimageNullFilename
+.testDF24setilIllegalIl
+.testDF24readrefNullFilename
+.testDF24getdimsNullFilename
+.testDF24addimageNullImage
+.testDF24nimagesNullFilename
+.testDF24setcompressNullCompInfo
+.testDF24addimageNullFilename
+.testDFPaddpalNullPalette
+.testDFPputpalNull
+.testDFPwriterefNull
+.testDFPgetpalNull
+.testDFPputpalNullPalette
+.testDFPgetpalNullPalette
+.testDFPaddpalNull
+.testDFPnpalsNull
+.testDFPreadrefNull
+.testDFPputpalArgNull
+.testDFR8nimagesNullFilename
+.testDFR8readrefIllegalRef
+.testDFR8getdimsNullPalette
+.testDFR8getpalrefNullPalRef
+.testDFR8setcompressNullCompInfo
+.testDFR8getdimsNullDimensions
+.testDFR8putimageNullFilename
+.testDFR8getimageNullFilename
+.testDFR8writerefNullFilename
+.testDFR8addimageNullImage
+.testDFR8getimageNullImageData
+.testDFR8getpalrefIllegalArgument
+.testDFR8getdimsIllegalArgument
+.testDFR8readrefNullFilename
+.testDFR8putimageNullImage
+.testDFR8addimageNullFilename
+.testDFR8getdimsNullFilename
+.testGRsetattrStrNullName
+.testGRsetattrStrNullVals
+.testGRidtorefIllegalId
+.testGRluttorefIllegalId
+.testGRendIllegalId
+.testGRattrinfoNullArgs
+.testGRattrinfoNullName
+.testGRreadchunkIllegalId
+.testGRgetcompressIllegalId
+.testGRreqimageilIllegalId
+.testGRwritelutIllegalId
+.testGRwriteimageIllegalId
+.testGRsetexternalfileNull
+.testGRselectIllegalId
+.testGRfileinfoNull
+.testGRreadimageNullCount
+.testGRreadimageNullStart
+.testGRnametoindexIllegalId
+.testGRattrinfoArgumentArgs
+.testGRgetcompressNull
+.testGRstartIllegalId
+.testGRgetlutidIllegalId
+.testGRsetattrIllegalId
+.testGRattrinfoIllegalId
+.testGRsetchunkcacheIllegalId
+.testGRreqlutilIllegalId
+.testGRreadchunkNullArgument
+.testGRgetchunkinfoIllegalId
+.testGRgetnlutsIllegalId
+.testGRgetiminfoNullArgs
+.testGRgetiminfoNullDims
+.testGRgetiminfoNullName
+.testGRsetexternalfileIllegalId
+.testGRreadchunkNull
+.testGRfindattrIllegalId
+.testGRfileinfoArgument
+.testGRsetcompressNull
+.testGRcreateNullDims
+.testGRcreateNullName
+.testGRfindattrNull
+.testGRreadimageIllegalId
+.testGRnametoindexNull
+.testGRsetchunkIllegalId
+.testGRgetlutinfoIllegalId
+.testGRwriteimageNullData
+.testGRreftoindexIllegalId
+.testGRgetiminfoArgumentArgs
+.testGRgetiminfoArgumentDims
+.testGRwriteimageNullCount
+.testGRreadlutIllegalId
+.testGRwriteimageNullStart
+.testGRgetattrIllegalId
+.testGRcreateArgumentDims
+.testGRsetchunkNull
+.testGRwritelutNull
+.testGRgetiminfoIllegalId
+.testGRreadimageNullData
+.testGRsetattrStrIllegalId
+.testGRsetattrNullName
+.testGRsetattrNullVals
+.testGRreadlutNull
+.testGRgetlutinfoNull
+.testGRendaccessIllegalId
+.testGRgetlutinfoArgument
+.testGRgetattrNull
+.testGRgetchunkinfoNull
+.testGRcreateIllegalId
+.testGRfileinfoIllegalId
+.testGRsetcompressIllegalId
+.testGRreadchunkArgument
+.testHCget_config_infoIllegalCoderType
+.testSSDgetcalArgument
+.testSDsetcompressNull
+.testSDgetcalIllegalId
+.testSDsetaccesstypeIllegalId
+.testSDsetrangeNullMax
+.testSDsetrangeNullMin
+.testSDsetnbitdatasetIllegalId
+.testSDcheckemptyNull
+.testSDnametoindexNull
+.testSDgetdimscaleNull
+.testSDsetexternalfileNull
+.testSDreaddata_longIllegalId
+.testSDreadattrIllegalId
+.testSDreaddataIllegalId
+.testSDwritechunkNull
+.testSDsetdimstrsIllegalId
+.testSDsetchunkcacheIllegalId
+.testSDfindattrNull
+.testSDgetdatastrsNull
+.testSDsetattrIllegalId
+.testSDgetdimstrsArgument
+.testSDcreateNullDims
+.testSDcreateNullName
+.testSDsetcalIllegalId
+.testSDgetcompinfoIllegalId
+.testSDgetfillvalueNull
+.testSDattrinfoIllegalId
+.testSDgetinfoArgumentArgs
+.testSDwritedataNullCount
+.testSDwritedataNullStart
+.testSDsetrangeIllegalId
+.testSDsetdimscaleNull
+.testSDreaddata_floatIllegalId
+.testSDreaddataNullData
+.testSDfileinfoArgument
+.testSDsetattrNullName
+.testSDsetattrNullVals
+.testSDisdimval_bwcompIllegalId
+.testSDsetexternalfileIllegalId
+.testSDreadchunkNull
+.testSDsetchunkNull
+.testSDreaddata_doubleIllegalId
+.testSDgetdatastrsArgument
+.testSDsetdimnameNull
+.testSDfindattrIllegalId
+.testSDcreateIllegalId
+.testSDdiminfoIllegalId
+.testSDgetdimstrsIllegalId
+.testSDgetrangeIllegalId
+.testSDsetfillvalueIllegalId
+.testSDgetcalNullArgument
+.testSDwritechunkNullArgument
+.testSDreadchunkNullArgument
+.testSDendaccessIllegalId
+.testSDendIllegalId
+.testSDsetdimval_compIllegalId
+.testSDsetcompressIllegalId
+.testSDsetchunkIllegalId
+.testSDsetdatastrsIllegalId
+.testSDstartNull
+.testSDsetdimnameIllegalId
+.testSDselectIllegalId
+.testSDreftoindexIllegalId
+.testSDwritedataIllegalId
+.testSDreaddataNullCount
+.testSDreaddataNullStart
+.testSDgetcalNull
+.testSDdiminfoArgumentArgs
+.testSDdiminfoNullArgs
+.testSDdiminfoNullName
+.testSDgetdatastrsIllegalId
+.testSDsetdimscaleIllegalId
+.testSDfileinfoNull
+.testSDgetinfoNullArgs
+.testSDgetinfoNullDims
+.testSDgetinfoNullName
+.testSDsetblocksizeIllegalId
+.testSDcheckemptyIllegalId
+.testSDreadattrNull
+.testSDgetdimidIllegalId
+.testSDwritedataNullData
+.testSDgetdimstrsNull
+.testSDidtorefIllegalId
+.testSDgetrangeNullMax
+.testSDgetrangeNullMin
+.testSDsetfillvalueNull
+.testSDfileinfoIllegalId
+.testSDgetinfoIllegalId
+.testSDgetdimscaleIllegalId
+.testSDattrinfoNullArgs
+.testSDattrinfoNullName
+.testSDgetcompinfoNull
+.testSDnametoindexIllegalId
+.testSDattrinfoArgumentArgs
+.testSDsetfillmodeIllegalId
+.testSDiscoordvarIllegalId
+.testSDgetfillvalueIllegalId
+.testSDreaddata_shortIllegalId
+.testSDreaddata_intIllegalId
+.testVgetclassNullClassName
+.testVisvsIllegalId
+.testVsetattrNullName2
+.testVinquireIllegalNEntriesArgument
+.testVgetnameIllegalArgument
+.testVfindclassNullClassName
+.testVattrinfoNullArgv
+.testVattrinfoNullName
+.testVattrinfoIllegalArgvArgument
+.testVgetattrNullData
+.testVsetattrNullDataByteArray
+.testVgettagrefsIllegalTagsArgument
+.testVloneNullRefArray
+.testVattrinfoIllegalId
+.testVsetclassIllegalId
+.testVattachNullAccess
+.testVloneIllegalSizeRefArray
+.testVgettagrefsIllegalId
+.testVgetnameNullName
+.testVgettagrefNullTagRef
+.testVfindNullVGroupName
+.testVfindattrIllegalId
+.testVsetattrNullName
+.testVgetclassIllegalId
+.testVsetattrIllegalId
+.testVflocateIllegalKey
+.testVsetattrNullValuesString
+.testVattachIllegalId
+.testVsetnameIllegalId
+.testVgettagrefsIllegalRefsArgument
+.testVsetnameNullName
+.testVsetclassNullClassName
+.testVattrinfoIllegalNameArgument
+.testVisvgIllegalId
+.testVinquireNullNEntries
+.testVflocateNullClassName
+.testVsetattrIllegalId2
+.testVfindattrNullName
+.testVinquireIllegalVGroupNameArgument
+.testVgetclassIllegalArgument
+.testVinquireNullVGroupName
+.testVgettagrefIllegalId
+.testVgetattrIllegalId
+.testVinquireIllegalId
+.testVgettagrefIllegalTagRefArgument
+.testVgettagrefsNullRefs
+.testVgettagrefsNullTags
+.testVgetnameIllegalId
+.testVSsizeofIllegalId
+.testVSattachNullAccess
+.testVSreadNullDataBuffer
+.testVSsetnumblocksIllegalId
+.testVSfindexNullName
+.testVSsetfieldsNullFields
+.testVSattrinfoIllegalNameArgument
+.testVSQueryfieldsNullFields
+.testVSQuerycountIllegalId
+.testVSQueryfieldsIllegalArgument
+.testVSattrinfoIllegalId
+.testVSsetclassIllegalId
+.testVSsetattrIllegalId
+.testVSfindclassNullClass
+.testVSinquireNullIArgs
+.testVSinquireNullSArgs
+.testVSsizeofNullFields
+.testVSsetnameIllegalId
+.testVSgetattrNullData
+.testVSsetattrNullAttributeName
+.testVSQueryvsizeIllegalArgument
+.testVSQueryinterlaceIllegalId
+.testVSfindattrIllegalId
+.testVSQuerycountNullNRecords
+.testVSsetexternalfileNullFilename
+.testVSgetclassIllegalId
+.testVSgetnameNullName
+.testVSfindNullVDataName
+.testVSsetexternalfileIllegalVKey
+.testVSfindattrNullName
+.testVSgetfieldsIllegalId
+.testVSsetblocksizeIllegalId
+.testVSsetattrNullAttributeName2
+.testVSQueryinterlaceNullInterlace
+.testVSfindexIllegalId
+.testVSQuerycountIllegalArgument
+.testVSfdefineIllegalId
+.testVSattachIllegalId
+.testVSinquireIllegalIArgs
+.testVSinquireIllegalSArgs
+.testVSfexistNullFields
+.testVSsetattrIllegalId2
+.testVSgetattrIllegalId
+.testVSinquireIllegalId
+.testVSsetnameNullVDataName
+.testVSgetfieldsNullFieldName
+.testVSQuerynameNullVDataName
+.testVSwriteIllegalId
+.testVSsetinterlaceIllegalId
+.testVSgetnameIllegalId
+.testVSQuerynameIllegalId
+.testVSQueryinterlaceIllegalArgument
+.testVSsetfieldsIllegalId
+.testVSQueryfieldsIllegalId
+.testVSsetclassNullClass
+.testVSQueryvsizeNullVDataSize
+.testVSgetclassNullClassName
+.testVSattachIllegalReg
+.testVSfindexNullIndexArray
+.testVSfexistIllegalId
+.testVSattrinfoIllegalAttributeArgs
+.testVSwriteNullDataBuffer
+.testVSappendableIllegalVKey
+.testVSattrinfoNullArgs
+.testVSattrinfoNullName
+.testVSQueryvsizeIllegalId
+.testVSQuerynameIllegalArgument
+.testVSfdefineNullFieldName
+.testVSloneNullRefArray
+
+Time: XXXX
+
+OK (361 tests)
+
diff --git a/java/test/Makefile.am b/java/test/Makefile.am
new file mode 100644
index 0000000..7271da1
--- /dev/null
+++ b/java/test/Makefile.am
@@ -0,0 +1,62 @@
+#
+# HDF Java native interface (JNI) Library Test Makefile(.in)
+
+include $(top_srcdir)/config/commence.am
+
+# Mark this directory as part of the JNI API
+JAVA_API=yes
+
+JAVAROOT = .classes
+
+classes:
+ test -d $(@D)/$(JAVAROOT) || $(MKDIR_P) $(@D)/$(JAVAROOT)
+
+pkgpath = test
+hdfjarfile = jar$(PACKAGE_TARNAME)-$(PACKAGE_VERSION).jar
+jarfile = jar$(PACKAGE_TARNAME)test.jar
+
+CLASSPATH_ENV=CLASSPATH=.:$(JAVAROOT):$(top_builddir)/java/src/$(hdfjarfile):$(top_srcdir)/java/lib/junit.jar:$(top_srcdir)/java/lib/hamcrest-core.jar:$(top_srcdir)/java/lib/slf4j-api-1.7.5.jar:$(top_srcdir)/java/lib/ext/slf4j-simple-1.7.5.jar:$$CLASSPATH
+AM_JAVACFLAGS = $(H4_JAVACFLAGS) -deprecation
+
+noinst_JAVA = \
+ TestH4.java \
+ TestH4ANparams.java \
+ TestH4DFparams.java \
+ TestH4DFPparams.java \
+ TestH4DFRparams.java \
+ TestH4GRparams.java \
+ TestH4HCparams.java \
+ TestH4SDparams.java \
+ TestH4Vparams.java \
+ TestH4VSparams.java \
+ TestAll.java
+
+$(jarfile): classnoinst.stamp classes
+ $(JAR) cvf $@ -C $(JAVAROOT)/ $(pkgpath)
+
+noinst_DATA = $(jarfile)
+
+.PHONY: classes
+
+check_SCRIPTS = junit.sh
+TEST_SCRIPT = $(check_SCRIPTS)
+
+CLEANFILES = classnoinst.stamp $(jarfile) $(JAVAROOT)/$(pkgpath)/*.class junit.sh
+
+#JAVA_JUNIT = $(JAVA_SRCS)
+#noinst_JAVA = @JAVA_JUNIT@
+#EXTRA_JAVA = $(JAVA_JUNIT)
+TESTS_JUNIT = AllJunitTests
+#EXTRA_TEST = $(TESTS_JUNIT)
+
+AllJunitTests :
+ echo "#! /bin/sh" > $@
+ echo "exec @JUNIT@ test.TestAll" >> $@
+ chmod +x $@
+
+clean:
+ rm -rf $(JAVAROOT)/*
+ rm -f $(jarfile)
+ rm -f classnoinst.stamp
+
+include $(top_srcdir)/config/conclude.am
diff --git a/mfhdf/nctest/Makefile.in b/java/test/Makefile.in
similarity index 76%
copy from mfhdf/nctest/Makefile.in
copy to java/test/Makefile.in
index 4e60508..eefa718 100644
--- a/mfhdf/nctest/Makefile.in
+++ b/java/test/Makefile.in
@@ -14,8 +14,9 @@
@SET_MAKE@
-#############################################################################
-#############################################################################
+#
+# HDF Java native interface (JNI) Library Test Makefile(.in)
+
VPATH = @srcdir@
am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)'
am__make_running_with_option = \
@@ -83,30 +84,30 @@ host_triplet = @host@
DIST_COMMON = $(top_srcdir)/config/commence.am \
$(top_srcdir)/config/conclude.am $(srcdir)/Makefile.in \
$(srcdir)/Makefile.am $(top_srcdir)/bin/mkinstalldirs \
- $(top_srcdir)/bin/depcomp $(top_srcdir)/bin/test-driver README
-check_PROGRAMS = nctest$(EXEEXT)
-TESTS = $(am__EXEEXT_1)
-subdir = mfhdf/nctest
+ $(srcdir)/junit.sh.in $(top_srcdir)/bin/test-driver
+TESTS = $(TEST_SCRIPT)
+subdir = java/test
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
-am__aclocal_m4_deps = $(top_srcdir)/configure.ac
+am__aclocal_m4_deps = $(top_srcdir)/m4/ax_check_class.m4 \
+ $(top_srcdir)/m4/ax_check_classpath.m4 \
+ $(top_srcdir)/m4/ax_check_java_home.m4 \
+ $(top_srcdir)/m4/ax_check_junit.m4 \
+ $(top_srcdir)/m4/ax_java_options.m4 \
+ $(top_srcdir)/m4/ax_jni_include_dir.m4 \
+ $(top_srcdir)/m4/ax_prog_jar.m4 \
+ $(top_srcdir)/m4/ax_prog_java.m4 \
+ $(top_srcdir)/m4/ax_prog_java_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javac.m4 \
+ $(top_srcdir)/m4/ax_prog_javac_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javadoc.m4 \
+ $(top_srcdir)/m4/ax_try_compile_java.m4 \
+ $(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
mkinstalldirs = $(SHELL) $(top_srcdir)/bin/mkinstalldirs
CONFIG_HEADER = $(top_builddir)/hdf/src/h4config.h
-CONFIG_CLEAN_FILES =
+CONFIG_CLEAN_FILES = junit.sh
CONFIG_CLEAN_VPATH_FILES =
-am_nctest_OBJECTS = add.$(OBJEXT) atttests.$(OBJEXT) \
- cdftests.$(OBJEXT) dimtests.$(OBJEXT) driver.$(OBJEXT) \
- emalloc.$(OBJEXT) error.$(OBJEXT) misctest.$(OBJEXT) \
- rec.$(OBJEXT) slabs.$(OBJEXT) val.$(OBJEXT) \
- varget_unlim.$(OBJEXT) varget.$(OBJEXT) vargetg.$(OBJEXT) \
- varput.$(OBJEXT) varputg.$(OBJEXT) vardef.$(OBJEXT) \
- vartests.$(OBJEXT) vputget.$(OBJEXT) vputgetg.$(OBJEXT)
-nctest_OBJECTS = $(am_nctest_OBJECTS)
-AM_V_lt = $(am__v_lt_@AM_V@)
-am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@)
-am__v_lt_0 = --silent
-am__v_lt_1 =
AM_V_P = $(am__v_P_@AM_V@)
am__v_P_ = $(am__v_P_@AM_DEFAULT_V@)
am__v_P_0 = false
@@ -119,54 +120,16 @@ AM_V_at = $(am__v_at_@AM_V@)
am__v_at_ = $(am__v_at_@AM_DEFAULT_V@)
am__v_at_0 = @
am__v_at_1 =
-DEFAULT_INCLUDES = -I. at am__isrc@ -I$(top_builddir)/hdf/src
-depcomp = $(SHELL) $(top_srcdir)/bin/depcomp
-am__depfiles_maybe = depfiles
-am__mv = mv -f
-COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \
- $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS)
-LTCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \
- $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) \
- $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \
- $(AM_CFLAGS) $(CFLAGS)
-AM_V_CC = $(am__v_CC_@AM_V@)
-am__v_CC_ = $(am__v_CC_@AM_DEFAULT_V@)
-am__v_CC_0 = @echo " CC " $@;
-am__v_CC_1 =
-CCLD = $(CC)
-LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \
- $(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \
- $(AM_LDFLAGS) $(LDFLAGS) -o $@
-AM_V_CCLD = $(am__v_CCLD_@AM_V@)
-am__v_CCLD_ = $(am__v_CCLD_@AM_DEFAULT_V@)
-am__v_CCLD_0 = @echo " CCLD " $@;
-am__v_CCLD_1 =
-SOURCES = $(nctest_SOURCES)
-DIST_SOURCES = $(nctest_SOURCES)
+SOURCES =
+DIST_SOURCES =
am__can_run_installinfo = \
case $$AM_UPDATE_INFO_DIR in \
n|no|NO) false;; \
*) (install-info --version) >/dev/null 2>&1;; \
esac
+am__java_sources = $(noinst_JAVA)
+DATA = $(noinst_DATA)
am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP)
-# Read a list of newline-separated strings from the standard input,
-# and print each of them once, without duplicates. Input order is
-# *not* preserved.
-am__uniquify_input = $(AWK) '\
- BEGIN { nonempty = 0; } \
- { items[$$0] = 1; nonempty = 1; } \
- END { if (nonempty) { for (i in items) print i; }; } \
-'
-# Make sure the list of sources is unique. This is necessary because,
-# e.g., the same source file might be shared among _SOURCES variables
-# for different programs/libraries.
-am__define_uniq_tagged_files = \
- list='$(am__tagged_files)'; \
- unique=`for i in $$list; do \
- if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
- done | $(am__uniquify_input)`
-ETAGS = etags
-CTAGS = ctags
am__tty_colors_dummy = \
mgn= red= grn= lgn= blu= brg= std=; \
am__color_tests=no
@@ -351,10 +314,12 @@ am__set_TESTS_bases = \
bases=`echo $$bases`
RECHECK_LOGS = $(TEST_LOGS)
AM_RECURSIVE_TARGETS = check recheck
-am__EXEEXT_1 = nctest$(EXEEXT)
TEST_SUITE_LOG = test-suite.log
-LOG_DRIVER = $(SHELL) $(top_srcdir)/bin/test-driver
-LOG_COMPILE = $(LOG_COMPILER) $(AM_LOG_FLAGS) $(LOG_FLAGS)
+am__test_logs1 = $(TESTS:=.log)
+am__test_logs2 = $(am__test_logs1:@EXEEXT@.log=.log)
+TEST_LOGS = $(am__test_logs2:.sh.log=.log)
+SH_LOG_DRIVER = $(SHELL) $(top_srcdir)/bin/test-driver
+SH_LOG_COMPILE = $(SH_LOG_COMPILER) $(AM_SH_LOG_FLAGS) $(SH_LOG_FLAGS)
am__set_b = \
case '$@' in \
*/*) \
@@ -365,11 +330,6 @@ am__set_b = \
*) \
b='$*';; \
esac
-am__test_logs1 = $(TESTS:=.log)
-am__test_logs2 = $(am__test_logs1:@EXEEXT@.log=.log)
-TEST_LOGS = $(am__test_logs2:.sh.log=.log)
-SH_LOG_DRIVER = $(SHELL) $(top_srcdir)/bin/test-driver
-SH_LOG_COMPILE = $(SH_LOG_COMPILER) $(AM_SH_LOG_FLAGS) $(SH_LOG_FLAGS)
DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
ACLOCAL = @ACLOCAL@
AMTAR = @AMTAR@
@@ -413,12 +373,27 @@ FFLAGS = @FFLAGS@
FGREP = @FGREP@
FLIBS = @FLIBS@
GREP = @GREP@
+H4_CLASSPATH = @H4_CLASSPATH@
+H4_JAVACFLAGS = @H4_JAVACFLAGS@
+H4_JAVAFLAGS = @H4_JAVAFLAGS@
H4_VERSION = @H4_VERSION@
+HDF_JAVA = @HDF_JAVA@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
INSTALL_PROGRAM = @INSTALL_PROGRAM@
INSTALL_SCRIPT = @INSTALL_SCRIPT@
INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+JAR = @JAR@
+JAVA = @JAVA@
+JAVAC = @JAVAC@
+JAVACFLAGS = @JAVACFLAGS@
+JAVADOC = @JAVADOC@
+JAVAFLAGS = @JAVAFLAGS@
+JAVAPREFIX = @JAVAPREFIX@
+JAVA_JUNIT = @JAVA_JUNIT@
+JAVA_PATH_NAME = @JAVA_PATH_NAME@
+JNIFLAGS = @JNIFLAGS@
+JUNIT = @JUNIT@
LD = @LD@
LDFLAGS = @LDFLAGS@
LEX = @LEX@
@@ -461,11 +436,18 @@ STRIP = @STRIP@
SZIP_HAS_ENCODER = @SZIP_HAS_ENCODER@
SZIP_INFO = @SZIP_INFO@
TBL = @TBL@
+
+#JAVA_JUNIT = $(JAVA_SRCS)
+#noinst_JAVA = @JAVA_JUNIT@
+#EXTRA_JAVA = $(JAVA_JUNIT)
+TESTS_JUNIT = AllJunitTests
TEST_FORTRAN_NETCDF = @TEST_FORTRAN_NETCDF@
UNAME_INFO = @UNAME_INFO@
USE_COMP_SZIP = @USE_COMP_SZIP@
+UUDECODE = @UUDECODE@
VERSION = @VERSION@
YACC = @YACC@
+_ACJNI_JAVAC = @_ACJNI_JAVAC@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
@@ -538,30 +520,33 @@ ACLOCAL_AMFLAGS = "-I m4"
# .chkexe files are used to mark tests that have run successfully.
# .chklog files are output from those tests.
-
-#############################################################################
-#############################################################################
-CHECK_CLEANFILES = *.chkexe *.chklog test2.nc test.nc
-nctest_INCLUDES = -I$(top_srcdir)/hdf/src \
- -I$(top_srcdir)/mfhdf/libsrc \
- -I$(top_builddir)/mfhdf/libsrc
-
-DEFINES = -DNDEBUG -DHDF
-AM_CPPFLAGS = $(nctest_INCLUDES) $(DEFINES)
-
-#############################################################################
-#############################################################################
-TEST_PROG = nctest
-
-# Information for building the "ncgen" program
-nctest_SOURCES = add.c atttests.c cdftests.c dimtests.c driver.c emalloc.c \
- error.c misctest.c rec.c slabs.c val.c varget_unlim.c \
- varget.c vargetg.c varput.c varputg.c vardef.c vartests.c \
- vputget.c vputgetg.c
-
-nctest_LDADD = $(LIBMFHDF) $(LIBHDF)
-nctest_DEPENDENCIES = $(LIBMFHDF) $(LIBHDF)
-DISTCLEANFILES =
+CHECK_CLEANFILES = *.chkexe *.chklog
+
+# Mark this directory as part of the JNI API
+JAVA_API = yes
+JAVAROOT = .classes
+pkgpath = test
+hdfjarfile = jar$(PACKAGE_TARNAME)-$(PACKAGE_VERSION).jar
+jarfile = jar$(PACKAGE_TARNAME)test.jar
+CLASSPATH_ENV = CLASSPATH=.:$(JAVAROOT):$(top_builddir)/java/src/$(hdfjarfile):$(top_srcdir)/java/lib/junit.jar:$(top_srcdir)/java/lib/hamcrest-core.jar:$(top_srcdir)/java/lib/slf4j-api-1.7.5.jar:$(top_srcdir)/java/lib/ext/slf4j-simple-1.7.5.jar:$$CLASSPATH
+AM_JAVACFLAGS = $(H4_JAVACFLAGS) -deprecation
+noinst_JAVA = \
+ TestH4.java \
+ TestH4ANparams.java \
+ TestH4DFparams.java \
+ TestH4DFPparams.java \
+ TestH4DFRparams.java \
+ TestH4GRparams.java \
+ TestH4HCparams.java \
+ TestH4SDparams.java \
+ TestH4Vparams.java \
+ TestH4VSparams.java \
+ TestAll.java
+
+noinst_DATA = $(jarfile)
+check_SCRIPTS = junit.sh
+TEST_SCRIPT = $(check_SCRIPTS)
+CLEANFILES = classnoinst.stamp $(jarfile) $(JAVAROOT)/$(pkgpath)/*.class junit.sh
# Automake needs to be taught how to build lib, progs, and tests targets.
# These will be filled in automatically for the most part (e.g.,
@@ -583,7 +568,7 @@ TEST_SCRIPT_CHKSH = $(TEST_SCRIPT:=.chkexe_)
all: all-am
.SUFFIXES:
-.SUFFIXES: .c .lo .log .o .obj .sh .sh$(EXEEXT) .trs
+.SUFFIXES: .log .sh .sh$(EXEEXT) .trs
$(srcdir)/Makefile.in: @MAINTAINER_MODE_TRUE@ $(srcdir)/Makefile.am $(top_srcdir)/config/commence.am $(top_srcdir)/config/conclude.am $(am__configure_deps)
@for dep in $?; do \
case '$(am__configure_deps)' in \
@@ -593,9 +578,9 @@ $(srcdir)/Makefile.in: @MAINTAINER_MODE_TRUE@ $(srcdir)/Makefile.am $(top_srcdir
exit 1;; \
esac; \
done; \
- echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign mfhdf/nctest/Makefile'; \
+ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign java/test/Makefile'; \
$(am__cd) $(top_srcdir) && \
- $(AUTOMAKE) --foreign mfhdf/nctest/Makefile
+ $(AUTOMAKE) --foreign java/test/Makefile
.PRECIOUS: Makefile
Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
@case '$?' in \
@@ -615,67 +600,8 @@ $(top_srcdir)/configure: @MAINTAINER_MODE_TRUE@ $(am__configure_deps)
$(ACLOCAL_M4): @MAINTAINER_MODE_TRUE@ $(am__aclocal_m4_deps)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(am__aclocal_m4_deps):
-
-clean-checkPROGRAMS:
- @list='$(check_PROGRAMS)'; test -n "$$list" || exit 0; \
- echo " rm -f" $$list; \
- rm -f $$list || exit $$?; \
- test -n "$(EXEEXT)" || exit 0; \
- list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \
- echo " rm -f" $$list; \
- rm -f $$list
-
-nctest$(EXEEXT): $(nctest_OBJECTS) $(nctest_DEPENDENCIES) $(EXTRA_nctest_DEPENDENCIES)
- @rm -f nctest$(EXEEXT)
- $(AM_V_CCLD)$(LINK) $(nctest_OBJECTS) $(nctest_LDADD) $(LIBS)
-
-mostlyclean-compile:
- -rm -f *.$(OBJEXT)
-
-distclean-compile:
- -rm -f *.tab.c
-
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/add.Po@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/atttests.Po@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/cdftests.Po@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/dimtests.Po@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/driver.Po@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/emalloc.Po@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/error.Po@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/misctest.Po@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/rec.Po@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/slabs.Po@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/val.Po@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/vardef.Po@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/varget.Po@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/varget_unlim.Po@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/vargetg.Po@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/varput.Po@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/varputg.Po@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/vartests.Po@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/vputget.Po@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/vputgetg.Po@am__quote@
-
-.c.o:
-@am__fastdepCC_TRUE@ $(AM_V_CC)$(COMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $<
-@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po
-@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
-@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
-@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(COMPILE) -c -o $@ $<
-
-.c.obj:
-@am__fastdepCC_TRUE@ $(AM_V_CC)$(COMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'`
-@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po
-@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
-@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
-@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(COMPILE) -c -o $@ `$(CYGPATH_W) '$<'`
-
-.c.lo:
-@am__fastdepCC_TRUE@ $(AM_V_CC)$(LTCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $<
-@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo
-@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@
-@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
-@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LTCOMPILE) -c -o $@ $<
+junit.sh: $(top_builddir)/config.status $(srcdir)/junit.sh.in
+ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@
mostlyclean-libtool:
-rm -f *.lo
@@ -683,57 +609,25 @@ mostlyclean-libtool:
clean-libtool:
-rm -rf .libs _libs
-ID: $(am__tagged_files)
- $(am__define_uniq_tagged_files); mkid -fID $$unique
-tags: tags-am
-TAGS: tags
-
-tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
- set x; \
- here=`pwd`; \
- $(am__define_uniq_tagged_files); \
- shift; \
- if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \
- test -n "$$unique" || unique=$$empty_fix; \
- if test $$# -gt 0; then \
- $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
- "$$@" $$unique; \
- else \
- $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
- $$unique; \
- fi; \
- fi
-ctags: ctags-am
-
-CTAGS: ctags
-ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
- $(am__define_uniq_tagged_files); \
- test -z "$(CTAGS_ARGS)$$unique" \
- || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \
- $$unique
-
-GTAGS:
- here=`$(am__cd) $(top_builddir) && pwd` \
- && $(am__cd) $(top_srcdir) \
- && gtags -i $(GTAGS_ARGS) "$$here"
-cscopelist: cscopelist-am
-
-cscopelist-am: $(am__tagged_files)
- list='$(am__tagged_files)'; \
- case "$(srcdir)" in \
- [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \
- *) sdir=$(subdir)/$(srcdir) ;; \
- esac; \
- for i in $$list; do \
- if test -f "$$i"; then \
- echo "$(subdir)/$$i"; \
- else \
- echo "$$sdir/$$i"; \
- fi; \
- done >> $(top_builddir)/cscope.files
+classnoinst.stamp: $(am__java_sources)
+ @list1='$?'; list2=; if test -n "$$list1"; then \
+ for p in $$list1; do \
+ if test -f $$p; then d=; else d="$(srcdir)/"; fi; \
+ list2="$$list2 $$d$$p"; \
+ done; \
+ echo '$(CLASSPATH_ENV) $(JAVAC) -d $(JAVAROOT) $(AM_JAVACFLAGS) $(JAVACFLAGS) '"$$list2"; \
+ $(CLASSPATH_ENV) $(JAVAC) -d $(JAVAROOT) $(AM_JAVACFLAGS) $(JAVACFLAGS) $$list2; \
+ else :; fi
+ echo timestamp > $@
+
+clean-noinstJAVA:
+ -rm -f *.class classnoinst.stamp
+tags TAGS:
+
+ctags CTAGS:
+
+cscope cscopelist:
-distclean-tags:
- -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags
# Recover from deleted '.trs' file; this should ensure that
# "rm -f foo.log; make foo.trs" re-run 'foo.test', and re-create
@@ -854,7 +748,7 @@ $(TEST_SUITE_LOG): $(TEST_LOGS)
echo "$$col$$br$$std"; \
fi; \
$$success || exit 1
-recheck: all $(check_PROGRAMS)
+recheck: all $(check_SCRIPTS)
@test -z "$(TEST_SUITE_LOG)" || rm -f $(TEST_SUITE_LOG)
@set +e; $(am__set_TESTS_bases); \
bases=`for i in $$bases; do echo $$i; done \
@@ -865,13 +759,6 @@ recheck: all $(check_PROGRAMS)
am__force_recheck=am--force-recheck \
TEST_LOGS="$$log_list"; \
exit $$?
-nctest.log: nctest$(EXEEXT)
- @p='nctest$(EXEEXT)'; \
- b='nctest'; \
- $(am__check_pre) $(LOG_DRIVER) --test-name "$$f" \
- --log-file $$b.log --trs-file $$b.trs \
- $(am__common_driver_flags) $(AM_LOG_DRIVER_FLAGS) $(LOG_DRIVER_FLAGS) -- $(LOG_COMPILE) \
- "$$tst" $(AM_TESTS_FD_REDIRECT)
.sh.log:
@p='$<'; \
$(am__set_b); \
@@ -918,10 +805,10 @@ distdir: $(DISTFILES)
fi; \
done
check-am: all-am
- $(MAKE) $(AM_MAKEFLAGS) $(check_PROGRAMS)
+ $(MAKE) $(AM_MAKEFLAGS) $(check_SCRIPTS)
$(MAKE) $(AM_MAKEFLAGS) check-TESTS
check: check-am
-all-am: Makefile all-local
+all-am: Makefile classnoinst.stamp $(DATA) all-local
installdirs:
install: install-am
install-exec: install-exec-am
@@ -948,25 +835,20 @@ mostlyclean-generic:
-test -z "$(TEST_SUITE_LOG)" || rm -f $(TEST_SUITE_LOG)
clean-generic:
+ -test -z "$(CLEANFILES)" || rm -f $(CLEANFILES)
distclean-generic:
-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
-test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES)
- -test -z "$(DISTCLEANFILES)" || rm -f $(DISTCLEANFILES)
maintainer-clean-generic:
@echo "This command is intended for maintainers to use"
@echo "it deletes files that may require special tools to rebuild."
-clean: clean-am
-
-clean-am: clean-checkPROGRAMS clean-generic clean-libtool \
- mostlyclean-am
+clean-am: clean-generic clean-libtool clean-noinstJAVA mostlyclean-am
distclean: distclean-am
- -rm -rf ./$(DEPDIR)
-rm -f Makefile
-distclean-am: clean-am distclean-compile distclean-generic \
- distclean-tags
+distclean-am: clean-am distclean-generic
dvi: dvi-am
@@ -1009,14 +891,13 @@ install-ps-am:
installcheck-am:
maintainer-clean: maintainer-clean-am
- -rm -rf ./$(DEPDIR)
-rm -f Makefile
maintainer-clean-am: distclean-am maintainer-clean-generic
mostlyclean: mostlyclean-am
-mostlyclean-am: mostlyclean-compile mostlyclean-generic \
- mostlyclean-libtool mostlyclean-local
+mostlyclean-am: mostlyclean-generic mostlyclean-libtool \
+ mostlyclean-local
pdf: pdf-am
@@ -1030,20 +911,38 @@ uninstall-am:
.MAKE: check-am install-am install-strip
-.PHONY: CTAGS GTAGS TAGS all all-am all-local check check-TESTS \
- check-am clean clean-checkPROGRAMS clean-generic clean-libtool \
- cscopelist-am ctags ctags-am distclean distclean-compile \
- distclean-generic distclean-libtool distclean-tags distdir dvi \
- dvi-am html html-am info info-am install install-am \
+.PHONY: all all-am all-local check check-TESTS check-am clean \
+ clean-generic clean-libtool clean-noinstJAVA cscopelist-am \
+ ctags-am distclean distclean-generic distclean-libtool distdir \
+ dvi dvi-am html html-am info info-am install install-am \
install-data install-data-am install-dvi install-dvi-am \
install-exec install-exec-am install-html install-html-am \
install-info install-info-am install-man install-pdf \
install-pdf-am install-ps install-ps-am install-strip \
installcheck installcheck-am installdirs maintainer-clean \
- maintainer-clean-generic mostlyclean mostlyclean-compile \
- mostlyclean-generic mostlyclean-libtool mostlyclean-local pdf \
- pdf-am ps ps-am recheck tags tags-am uninstall uninstall-am
+ maintainer-clean-generic mostlyclean mostlyclean-generic \
+ mostlyclean-libtool mostlyclean-local pdf pdf-am ps ps-am \
+ recheck tags-am uninstall uninstall-am
+
+
+classes:
+ test -d $(@D)/$(JAVAROOT) || $(MKDIR_P) $(@D)/$(JAVAROOT)
+
+$(jarfile): classnoinst.stamp classes
+ $(JAR) cvf $@ -C $(JAVAROOT)/ $(pkgpath)
+
+.PHONY: classes
+#EXTRA_TEST = $(TESTS_JUNIT)
+
+AllJunitTests :
+ echo "#! /bin/sh" > $@
+ echo "exec @JUNIT@ test.TestAll" >> $@
+ chmod +x $@
+clean:
+ rm -rf $(JAVAROOT)/*
+ rm -f $(jarfile)
+ rm -f classnoinst.stamp
# lib/progs/tests targets recurse into subdirectories. build-* targets
# build files in this directory.
diff --git a/java/test/TestAll.java b/java/test/TestAll.java
new file mode 100644
index 0000000..0a5f77d
--- /dev/null
+++ b/java/test/TestAll.java
@@ -0,0 +1,32 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Products. The full HDF copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import org.junit.runner.RunWith;
+import org.junit.runners.Suite;
+
+@RunWith(Suite.class)
+@Suite.SuiteClasses( { TestH4.class,
+ TestH4ANparams.class,
+ TestH4DFparams.class,
+ TestH4DFPparams.class,
+ TestH4DFRparams.class,
+ TestH4GRparams.class,
+ TestH4HCparams.class,
+ TestH4SDparams.class,
+ TestH4Vparams.class,
+ TestH4VSparams.class
+})
+
+public class TestAll {
+}
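
TestAll only aggregates the per-module parameter suites; both build systems in this patch run it from the command line (the CMake test passes org.junit.runner.JUnitCore ahead of test.TestAll, and the AllJunitTests rule in the Makefile writes a shell wrapper that invokes the configured JUnit runner on test.TestAll). For completeness, the same suite can be driven in-process through the JUnit 4 API; the small runner below is a hypothetical illustration and not part of this import:

    package test;

    import org.junit.runner.JUnitCore;
    import org.junit.runner.Result;
    import org.junit.runner.notification.Failure;

    // Hypothetical helper: runs the aggregated suite in-process instead of
    // going through the command-line JUnitCore entry point.
    public class RunAllInProcess {
        public static void main(String[] args) {
            Result result = JUnitCore.runClasses(TestAll.class);
            for (Failure failure : result.getFailures())
                System.err.println(failure.toString());
            System.out.println(result.wasSuccessful()
                    ? "OK (" + result.getRunCount() + " tests)"
                    : "Failures: " + result.getFailureCount());
        }
    }
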
diff --git a/java/test/TestH4.java b/java/test/TestH4.java
new file mode 100644
index 0000000..97253d3
--- /dev/null
+++ b/java/test/TestH4.java
@@ -0,0 +1,268 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Products. The full HDF copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+
+import hdf.hdflib.HDFLibrary;
+import hdf.hdflib.HDFConstants;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+/**
+ * @author xcao
+ *
+ */
+public class TestH4 {
+ @Rule public TestName testname = new TestName();
+ private static final String H4_FILE = "test.hdf";
+ long Hfid = -1;
+
+ private final void _deleteFile(String filename) {
+ File file = new File(filename);
+
+ if (file.exists()) {
+ try {file.delete();} catch (SecurityException e) {}
+ }
+ }
+
+ @Before
+ public void showTestName() {
+ System.out.print(testname.getMethodName());
+ }
+
+ @After
+ public void nextTestName() {
+ if (Hfid > 0)
+ try {HDFLibrary.Hclose(Hfid);} catch (Exception ex) {}
+
+ _deleteFile(H4_FILE);
+ System.out.println();
+ }
+
+ /**
+ * Test method for {@link hdf.hdflib.HDFLibrary#J2C(int)}.
+ */
+ @Test
+ public void testJ2C() {
+ int DFACC_RDONLY = 0x0001;
+ int DFACC_RDWR = 0x0003;
+ int DFACC_CREATE = 0x004;
+ int AN_DATA_LABEL = 0x0000;
+ int AN_DATA_DESC = 0x0001;
+ int AN_FILE_LABEL = 0x0002;
+ int AN_FILE_DESC = 0x0003;
+ int DFREF_NONE = 0x0000;
+
+ int definedValues[] = { DFACC_RDONLY, DFACC_RDWR, DFACC_CREATE, AN_DATA_LABEL,
+ AN_DATA_DESC, AN_FILE_LABEL, AN_FILE_DESC, DFREF_NONE };
+
+ int j2cValues[] = { HDFConstants.DFACC_RDONLY,
+ HDFConstants.DFACC_RDWR, HDFConstants.DFACC_CREATE, HDFConstants.AN_DATA_LABEL,
+ HDFConstants.AN_DATA_DESC, HDFConstants.AN_FILE_LABEL,
+ HDFConstants.AN_FILE_DESC, HDFConstants.DFREF_NONE };
+
+ for (int i = 0; i < definedValues.length; i++) {
+ assertEquals(definedValues[i], j2cValues[i]);
+ }
+
+ assertFalse(DFACC_RDONLY == HDFConstants.DFACC_RDWR);
+ assertFalse(AN_DATA_LABEL == HDFConstants.AN_FILE_DESC);
+ }
+
+ /**
+ * Test method for {@link hdf.hdflib.HDFLibrary#Hgetlibversion(int[], String[])}.
+ */
+ @Test
+ public void testHgetlibversion() {
+ int LIB_VERSION[] = { 4, 2, 11 };
+ int libversion[] = { 0, 0, 0 };
+ String version[] = { "" };
+
+ try {
+ HDFLibrary.Hgetlibversion(libversion, version);
+ }
+ catch (Throwable err) {
+ fail("HDFLibrary.Hgetlibversion: " + err);
+ }
+
+ for (int i = 0; i < 2; i++)
+ assertEquals(LIB_VERSION[i], libversion[i]);
+
+ for (int i = 0; i < 2; i++)
+ assertFalse(libversion[i] == 0);
+ }
+
+ /**
+ * Test method for {@link hdf.hdflib.HDFLibrary#Hopen()}.
+ */
+ @Test
+ public void testCreateCloseOpen() {
+ try {
+ Hfid = HDFLibrary.Hopen(H4_FILE, HDFConstants.DFACC_CREATE);
+ }
+ catch (Throwable err) {
+ fail("HDFLibrary.Hopen create failed: " + err);
+ }
+ assertTrue(Hfid > 0);
+ try {
+ HDFLibrary.Hclose(Hfid);
+ }
+ catch (Throwable err) {
+ fail("HDFLibrary.Hclose close failed: " + err);
+ }
+ try {
+ Hfid = HDFLibrary.Hopen(H4_FILE, HDFConstants.DFACC_RDWR);
+ }
+ catch (Throwable err) {
+ fail("HDFLibrary.Hopen open failed: " + err);
+ }
+ assertTrue(Hfid > 0);
+ }
+
+ /**
+ * Test method for {@link hdf.hdflib.HDFLibrary#Hishdf()}.
+ */
+ @Test
+ public void testHishdf() {
+ boolean fileIsHDF4 = false;
+
+ try {
+ Hfid = HDFLibrary.Hopen(H4_FILE, HDFConstants.DFACC_CREATE);
+ }
+ catch (Throwable err) {
+ fail("HDFLibrary.Hopen create failed: " + err);
+ }
+ assertTrue(Hfid > 0);
+ try {
+ HDFLibrary.Hclose(Hfid);
+ }
+ catch (Throwable err) {
+ fail("HDFLibrary.Hclose close failed: " + err);
+ }
+ try {
+ fileIsHDF4 = HDFLibrary.Hishdf(H4_FILE);
+ }
+ catch (Throwable err) {
+ fail("HDFLibrary.Hishdf failed: " + err);
+ }
+ assertTrue(fileIsHDF4);
+ }
+
+ /**
+ * Test method for {@link hdf.hdflib.HDFLibrary#HDgetNTdesc()}.
+ */
+ @Test
+ public void testHDgetNTdesc() {
+ String teststr = null;
+
+ try {
+ teststr = HDFLibrary.HDgetNTdesc(HDFConstants.DFNT_NATIVE);
+ }
+ catch (Throwable err) {
+ fail("HDFLibrary.HDgetNTdesc DFNT_NATIVE failed: " + err);
+ }
+ assertEquals("native format number-type not set", teststr);
+ try {
+ teststr = HDFLibrary.HDgetNTdesc(HDFConstants.DFNT_CUSTOM);
+ }
+ catch (Throwable err) {
+ fail("HDFLibrary.HDgetNTdesc DFNT_CUSTOM failed: " + err);
+ }
+ assertEquals("custom format number-type not set", teststr);
+ try {
+ teststr = HDFLibrary.HDgetNTdesc(HDFConstants.DFNT_FLOAT32);
+ }
+ catch (Throwable err) {
+ fail("HDFLibrary.HDgetNTdesc DFNT_FLOAT32 failed: " + err);
+ }
+ assertEquals("32-bit floating point", teststr);
+ try {
+ teststr = HDFLibrary.HDgetNTdesc(HDFConstants.DFNT_INT8);
+ }
+ catch (Throwable err) {
+ fail("HDFLibrary.HDgetNTdesc DFNT_INT8 failed: " + err);
+ }
+ assertEquals("8-bit signed integer", teststr);
+ try {
+ teststr = HDFLibrary.HDgetNTdesc(HDFConstants.DFNT_CHAR8);
+ }
+ catch (Throwable err) {
+ fail("HDFLibrary.HDgetNTdesc DFNT_CHAR8 failed: " + err);
+ }
+ assertEquals("8-bit signed char", teststr);
+ }
+
+ /**
+ * Test method for {@link hdf.hdflib.HDFLibrary#Hnumber()}.
+ */
+ @Test
+ public void testHnumber() {
+ int numberobjs = -1;
+
+ try {
+ Hfid = HDFLibrary.Hopen(H4_FILE, HDFConstants.DFACC_CREATE);
+ }
+ catch (Throwable err) {
+ fail("HDFLibrary.Hopen create failed: " + err);
+ }
+ assertTrue(Hfid > 0);
+ try {
+ numberobjs = HDFLibrary.Hnumber(Hfid);
+ }
+ catch (Throwable err) {
+ fail("HDFLibrary.Hnumber failed: " + err);
+ }
+ assertTrue(numberobjs > 0);
+ }
+
+ /**
+ * Test method for {@link hdf.hdflib.HDFLibrary#DFKNTsize()}.
+ */
+ @Test
+ public void testDFKNTsize() {
+ int testsize = -1;
+
+ try {
+ testsize = HDFLibrary.DFKNTsize(HDFConstants.DFNT_FLOAT64);
+ }
+ catch (Throwable err) {
+ fail("HDFLibrary.DFKNTsize DFNT_FLOAT64 failed: " + err);
+ }
+ assertEquals(8, testsize);
+ try {
+ testsize = HDFLibrary.DFKNTsize(HDFConstants.DFNT_INT16);
+ }
+ catch (Throwable err) {
+ fail("HDFLibrary.DFKNTsize DFNT_INT16 failed: " + err);
+ }
+ assertEquals(2, testsize);
+ try {
+ testsize = HDFLibrary.DFKNTsize(HDFConstants.DFNT_CHAR);
+ }
+ catch (Throwable err) {
+ fail("HDFLibrary.DFKNTsize DFNT_CHAR failed: " + err);
+ }
+ assertEquals(1, testsize);
+ }
+}
diff --git a/java/test/TestH4ANparams.java b/java/test/TestH4ANparams.java
new file mode 100644
index 0000000..49ce67b
--- /dev/null
+++ b/java/test/TestH4ANparams.java
@@ -0,0 +1,195 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Products. The full HDF copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+
+import hdf.hdflib.HDFLibrary;
+import hdf.hdflib.HDFException;
+import hdf.hdflib.HDFConstants;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+/**
+ *
+ */
+public class TestH4ANparams {
+ @Rule public TestName testname = new TestName();
+
+ @Before
+ public void showTestName() {
+ System.out.print(testname.getMethodName());
+ }
+
+ @After
+ public void nextTestName() {
+ System.out.println();
+ }
+
+ @Test(expected = HDFException.class)
+ public void testANstartIllegalId() throws Throwable {
+ HDFLibrary.ANstart(-1);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testANendIllegalId() throws Throwable {
+ HDFLibrary.ANend(-1);
+ }
+
+ @Test
+ public void testANendaccessIllegalId() throws Throwable {
+ //function does nothing
+ assertTrue(HDFLibrary.ANendaccess(-1));
+ }
+
+ @Test(expected = HDFException.class)
+ public void testANfileinfoIllegalId() throws Throwable {
+ int[] ids = {0, 0, 0, 0};
+ HDFLibrary.ANfileinfo(-1, ids);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testANfileinfoNull() throws Throwable {
+ HDFLibrary.ANfileinfo(0, null);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testANfileinfoArgument() throws Throwable {
+ int[] ids = {0, 0};
+ HDFLibrary.ANfileinfo(0, ids);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testANselectIllegalId() throws Throwable {
+ HDFLibrary.ANselect(-1, 0, 0);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testANnumannIllegalId() throws Throwable {
+ short tag = 0;
+ short ref = 0;
+ HDFLibrary.ANnumann(-1, 0, tag, ref);
+ }
+
+ @Test
+ public void testANatype2tagIllegalId() throws Throwable {
+ assertEquals(HDFConstants.DFTAG_NULL, HDFLibrary.ANatype2tag(-1));
+ }
+
+ @Test(expected = HDFException.class)
+ public void testANtag2atypeIllegalId() throws Throwable {
+ short anttype = -1;
+ HDFLibrary.ANtag2atype(anttype);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testANannlistIllegalId() throws Throwable {
+ short tag = 0;
+ short ref = 0;
+ int[] ids = {0, 0, 0, 0};
+ HDFLibrary.ANannlist(-1, 0, tag, ref, ids);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testANannlistNull() throws Throwable {
+ short tag = 0;
+ short ref = 0;
+
+ HDFLibrary.ANannlist(0, 0, tag, ref, null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testANannlenIllegalId() throws Throwable {
+ HDFLibrary.ANannlen(-1);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testANreadannIllegalId() throws Throwable {
+ String[] str = {""};
+ HDFLibrary.ANreadann(-1, str, 0);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testANcreateIllegalId() throws Throwable {
+ short tag = 0;
+ short ref = 0;
+ HDFLibrary.ANcreate(-1, tag, ref, 0);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testANcreatefIllegalId() throws Throwable {
+ HDFLibrary.ANcreatef(-1, 0);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testANget_tagrefIllegalId() throws Throwable {
+ short[] ref = {0, 0};
+ HDFLibrary.ANget_tagref(-1, 0, 0, ref);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testANget_tagrefNull() throws Throwable {
+ HDFLibrary.ANget_tagref(0, 0, 0, null);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testANget_tagrefArgument() throws Throwable {
+ short[] ref = {0};
+ HDFLibrary.ANget_tagref(0, 0, 0, ref);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testANid2tagrefIllegalId() throws Throwable {
+ short[] tag = {0, 0};
+ HDFLibrary.ANid2tagref(-1, tag);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testANid2tagrefNull() throws Throwable {
+ HDFLibrary.ANid2tagref(0, null);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testANid2tagrefArgument() throws Throwable {
+ short[] tag = {0};
+ HDFLibrary.ANid2tagref(0, tag);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testANtagref2idIllegalId() throws Throwable {
+ short tag = 0;
+ short ref = 0;
+ HDFLibrary.ANtagref2id(-1, tag, ref);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testANwriteannIllegalId() throws Throwable {
+ String str = "";
+ HDFLibrary.ANwriteann(-1, str, 0);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testANwriteannNull() throws Throwable {
+ HDFLibrary.ANwriteann(0, null, 0);
+ }
+}
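
The tests above cover only the failure paths of the AN (multifile annotation) interface. A minimal happy-path sketch of the same calls, under the same assumption of long identifiers and with a placeholder file name, might look like:

    import hdf.hdflib.HDFConstants;
    import hdf.hdflib.HDFException;
    import hdf.hdflib.HDFLibrary;

    public class ANReadSketch {
        public static void main(String[] args) throws HDFException {
            long fid = HDFLibrary.Hopen("annotated.hdf", HDFConstants.DFACC_READ);
            long anid = HDFLibrary.ANstart(fid);   // open the annotation interface on the file

            // Counts of file labels, file descriptions, data labels and data descriptions.
            int[] info = {0, 0, 0, 0};
            HDFLibrary.ANfileinfo(anid, info);
            System.out.println("file labels: " + info[0] + ", file descriptions: " + info[1]);

            HDFLibrary.ANend(anid);                // close the interface before the file
            HDFLibrary.Hclose(fid);
        }
    }
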
diff --git a/java/test/TestH4DFPparams.java b/java/test/TestH4DFPparams.java
new file mode 100644
index 0000000..59dac05
--- /dev/null
+++ b/java/test/TestH4DFPparams.java
@@ -0,0 +1,110 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Products. The full HDF copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+
+import hdf.hdflib.HDFLibrary;
+import hdf.hdflib.HDFException;
+import hdf.hdflib.HDFConstants;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+/**
+ * Parameter-validation tests for the HDFLibrary DFP (palette) interface.
+ */
+public class TestH4DFPparams {
+ @Rule public TestName testname = new TestName();
+
+ @Before
+ public void showTestName() {
+ System.out.print(testname.getMethodName());
+ }
+
+ @After
+ public void nextTestName() {
+ System.out.println();
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testDFPaddpalNull() throws Throwable {
+ byte[] palette = {0};
+ HDFLibrary.DFPaddpal(null, palette);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testDFPaddpalNullPalette() throws Throwable {
+ String str = "";
+ HDFLibrary.DFPaddpal(str, null);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testDFPgetpalNull() throws Throwable {
+ byte[] palette = {0};
+ HDFLibrary.DFPgetpal(null, palette);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testDFPgetpalNullPalette() throws Throwable {
+ String str = "";
+ HDFLibrary.DFPgetpal(str, null);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testDFPnpalsNull() throws Throwable {
+ HDFLibrary.DFPnpals(null);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testDFPputpalNull() throws Throwable {
+ byte[] palette = {0};
+ String arg = "a";
+ HDFLibrary.DFPputpal(null, palette, 0, arg);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testDFPputpalArgNull() throws Throwable {
+ byte[] palette = {0};
+ String arg = "a";
+ HDFLibrary.DFPputpal(arg, palette, 0, null);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testDFPputpalNullPalette() throws Throwable {
+ String arg = "a";
+ String mode = "a";
+ HDFLibrary.DFPputpal(arg, null, 0, mode);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testDFPreadrefNull() throws Throwable {
+ short ref = 0;
+ HDFLibrary.DFPreadref(null, ref);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testDFPwriterefNull() throws Throwable {
+ short ref = 0;
+ HDFLibrary.DFPwriteref(null, ref);
+ }
+}
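
For context, DFPnpals and DFPgetpal are normally used together to read a stored palette. A rough sketch follows, assuming DFPnpals returns the palette count as its C counterpart does; the file name is a placeholder.

    import hdf.hdflib.HDFException;
    import hdf.hdflib.HDFLibrary;

    public class DFPReadSketch {
        public static void main(String[] args) throws HDFException {
            String file = "palette.hdf";

            // Number of palettes stored in the file.
            int npals = HDFLibrary.DFPnpals(file);

            // An HDF palette is 256 RGB triples, i.e. 768 bytes.
            byte[] palette = new byte[256 * 3];
            if (npals > 0)
                HDFLibrary.DFPgetpal(file, palette);

            System.out.println(npals + " palette(s) found");
        }
    }
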
diff --git a/java/test/TestH4DFRparams.java b/java/test/TestH4DFRparams.java
new file mode 100644
index 0000000..ac9be4a
--- /dev/null
+++ b/java/test/TestH4DFRparams.java
@@ -0,0 +1,139 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Products. The full HDF copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+
+import hdf.hdflib.HDFLibrary;
+import hdf.hdflib.HDFException;
+import hdf.hdflib.HDFChunkInfo;
+import hdf.hdflib.HDFConstants;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+
+public class TestH4DFRparams {
+ @Rule public TestName testname = new TestName();
+
+ @Before
+ public void showTestName() {
+ System.out.print(testname.getMethodName());
+ }
+
+ @After
+ public void nextTestName() {
+ System.out.println();
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testDFR8getdimsNullFilename() throws Throwable {
+ HDFLibrary.DFR8getdims(null, new int[] {0, 0}, new boolean[] { true });
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testDFR8getdimsNullDimensions() throws Throwable {
+ String str = "";
+ HDFLibrary.DFR8getdims(str, null, new boolean[] { true });
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testDFR8getdimsNullPalette() throws Throwable {
+ String str = "";
+ HDFLibrary.DFR8getdims(str, new int[] {0, 0}, null);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testDFR8getdimsIllegalArgument() throws Throwable {
+ String str = "";
+ HDFLibrary.DFR8getdims(str, new int[] {0}, new boolean[] { true });
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testDFR8getimageNullFilename() throws Throwable {
+ HDFLibrary.DFR8getimage(null, new byte[] { (byte) 0x0 }, 0, 0, new byte[] { (byte) 0x0 });
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testDFR8getimageNullImageData() throws Throwable {
+ String str = "";
+ HDFLibrary.DFR8getimage(str, null, 0, 0, new byte[] { (byte) 0x0 });
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testDFR8readrefNullFilename() throws Throwable {
+ HDFLibrary.DFR8readref(null, 0);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testDFR8readrefIllegalRef() throws Throwable {
+ String str = "";
+ HDFLibrary.DFR8readref(str, -1);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testDFR8nimagesNullFilename() throws Throwable {
+ HDFLibrary.DFR8nimages(null);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testDFR8addimageNullFilename() throws Throwable {
+ HDFLibrary.DFR8addimage(null, new byte[] { (byte) 0x0 }, 0, 0, (short) 0);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testDFR8addimageNullImage() throws Throwable {
+ String str = "";
+ HDFLibrary.DFR8addimage(str, null, 0, 0, (short) 0);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testDFR8putimageNullFilename() throws Throwable {
+ HDFLibrary.DFR8putimage(null, new byte[] { (byte) 0x0 }, 0, 0, (short) 0);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testDFR8putimageNullImage() throws Throwable {
+ String str = "";
+ HDFLibrary.DFR8putimage(str, null, 0, 0, (short) 0);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testDFR8setcompressNullCompInfo() throws Throwable {
+ HDFLibrary.DFR8setcompress(0, null);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testDFR8getpalrefNullPalRef() throws Throwable {
+ HDFLibrary.DFR8getpalref(null);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testDFR8getpalrefIllegalArgument() throws Throwable {
+ HDFLibrary.DFR8getpalref(new short[] { });
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testDFR8writerefNullFilename() throws Throwable {
+ HDFLibrary.DFR8writeref(null, (short) 0);
+ }
+}
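
The DFR8 calls checked above read 8-bit raster images. A typical read sequence is sketched below; the dimension order is assumed to be {width, height} as in the C DFR8getdims, and the file name is a placeholder.

    import hdf.hdflib.HDFException;
    import hdf.hdflib.HDFLibrary;

    public class DFR8ReadSketch {
        public static void main(String[] args) throws HDFException {
            String file = "raster8.hdf";

            int[] dims = {0, 0};              // assumed {width, height}
            boolean[] hasPalette = {false};   // set by the library if a palette is attached
            HDFLibrary.DFR8getdims(file, dims, hasPalette);

            byte[] image = new byte[dims[0] * dims[1]];   // one byte per pixel
            byte[] palette = new byte[256 * 3];
            HDFLibrary.DFR8getimage(file, image, dims[0], dims[1], palette);

            System.out.println("read " + dims[0] + "x" + dims[1] + " 8-bit raster, palette: " + hasPalette[0]);
        }
    }
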
diff --git a/java/test/TestH4DFparams.java b/java/test/TestH4DFparams.java
new file mode 100644
index 0000000..c9da95b
--- /dev/null
+++ b/java/test/TestH4DFparams.java
@@ -0,0 +1,130 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Products. The full HDF copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+
+import hdf.hdflib.HDFLibrary;
+import hdf.hdflib.HDFException;
+import hdf.hdflib.HDFChunkInfo;
+import hdf.hdflib.HDFConstants;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH4DFparams {
+ @Rule public TestName testname = new TestName();
+
+ @Before
+ public void showTestName() {
+ System.out.print(testname.getMethodName());
+ }
+
+ @After
+ public void nextTestName() {
+ System.out.println();
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testDF24getdimsNullFilename() throws Throwable {
+ HDFLibrary.DF24getdims(null, new int[] { 0, 0, 0 });
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testDF24getdimsNullArguments() throws Throwable {
+ String str = "";
+ HDFLibrary.DF24getdims(str, null);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testDF24getdimsIllegalArgument() throws Throwable {
+ String str = "";
+ HDFLibrary.DF24getdims(str, new int[] { 0, 0 });
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testDF24getimageNullFilename() throws Throwable {
+ HDFLibrary.DF24getimage(null, new byte[] { (byte) 0x0 }, 0, 0);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testDF24getimageNullImageData() throws Throwable {
+ String str = "";
+ HDFLibrary.DF24getimage(str, null, 0, 0);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testDF24readrefNullFilename() throws Throwable {
+ HDFLibrary.DF24readref(null, 0);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testDF24readrefIllegalRef() throws Throwable {
+ String str = "";
+ HDFLibrary.DF24readref(str, -1);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testDF24nimagesNullFilename() throws Throwable {
+ HDFLibrary.DF24nimages(null);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testDF24addimageNullFilename() throws Throwable {
+ HDFLibrary.DF24addimage(null, new byte[] { (byte) 0x0 }, 0, 0);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testDF24addimageNullImage() throws Throwable {
+ String str = "";
+ HDFLibrary.DF24addimage(str, null, 0, 0);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testDF24putimageNullFilename() throws Throwable {
+ byte[] img = {0};
+ HDFLibrary.DF24putimage(null, img, 0, 0);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testDF24putimageNullImage() throws Throwable {
+ String str = "";
+ byte[] img = null;
+ HDFLibrary.DF24putimage(str, img, 0, 0);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testDF24putimageIllegalArgument() throws Throwable {
+ String str = "";
+ HDFLibrary.DF24putimage(str, new byte[] { }, 0, 0);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testDF24setcompressNullCompInfo() throws Throwable {
+ HDFLibrary.DF24setcompress(0, null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testDF24setilIllegalIl() throws Throwable {
+ HDFLibrary.DF24setil(-1);
+ }
+}
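
For the 24-bit raster interface, the usual read sequence is analogous. The sketch below assumes DF24getdims fills {width, height, interlace} as in the C API and uses a placeholder file name.

    import hdf.hdflib.HDFException;
    import hdf.hdflib.HDFLibrary;

    public class DF24ReadSketch {
        public static void main(String[] args) throws HDFException {
            String file = "raster24.hdf";

            int[] dims = {0, 0, 0};           // assumed {width, height, interlace}
            HDFLibrary.DF24getdims(file, dims);

            // 24-bit image: 3 bytes per pixel.
            byte[] image = new byte[dims[0] * dims[1] * 3];
            HDFLibrary.DF24getimage(file, image, dims[0], dims[1]);

            System.out.println("read " + dims[0] + "x" + dims[1] + " 24-bit raster");
        }
    }
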
diff --git a/java/test/TestH4GRparams.java b/java/test/TestH4GRparams.java
new file mode 100644
index 0000000..82b7df1
--- /dev/null
+++ b/java/test/TestH4GRparams.java
@@ -0,0 +1,535 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Products. The full HDF copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+
+import hdf.hdflib.HDFLibrary;
+import hdf.hdflib.HDFException;
+import hdf.hdflib.HDFChunkInfo;
+import hdf.hdflib.HDFCompInfo;
+import hdf.hdflib.HDFConstants;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+/**
+ * Parameter-validation tests for the HDFLibrary GR (general raster) interface.
+ */
+public class TestH4GRparams {
+ @Rule public TestName testname = new TestName();
+
+ @Before
+ public void showTestName() {
+ System.out.print(testname.getMethodName());
+ }
+
+ @After
+ public void nextTestName() {
+ System.out.println();
+ }
+
+ @Test(expected = HDFException.class)
+ public void testGRstartIllegalId() throws Throwable {
+ HDFLibrary.GRstart(-1);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testGRendIllegalId() throws Throwable {
+ HDFLibrary.GRend(-1);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testGRfileinfoIllegalId() throws Throwable {
+ int[] args = {0, 0};
+ HDFLibrary.GRfileinfo(-1, args);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testGRfileinfoNull() throws Throwable {
+ HDFLibrary.GRfileinfo(0, null);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testGRfileinfoArgument() throws Throwable {
+ int[] args = {0};
+ HDFLibrary.GRfileinfo(0, args);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testGRgetchunkinfoIllegalId() throws Throwable {
+ int[] args = {0};
+ HDFChunkInfo chunk_def = new HDFChunkInfo();
+ HDFLibrary.GRgetchunkinfo(-1, chunk_def, args);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testGRgetchunkinfoNull() throws Throwable {
+ HDFChunkInfo chunk_def = new HDFChunkInfo();
+ HDFLibrary.GRgetchunkinfo(0, chunk_def, null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testGRselectIllegalId() throws Throwable {
+ HDFLibrary.GRselect(-1, 0);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testGRnametoindexIllegalId() throws Throwable {
+ HDFLibrary.GRnametoindex(-1, "");
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testGRnametoindexNull() throws Throwable {
+ HDFLibrary.GRnametoindex(0, null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testGRgetiminfoIllegalId() throws Throwable {
+ String[] gr_name = {""};
+ int[] args = {0, 0, 0, 0};
+ int[] dim_sizes = {0, 0};
+ HDFLibrary.GRgetiminfo(-1, gr_name, args, dim_sizes);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testGRgetiminfoNullName() throws Throwable {
+ int[] args = {0, 0, 0, 0};
+ int[] dim_sizes = {0, 0};
+ HDFLibrary.GRgetiminfo(0, null, args, dim_sizes);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testGRgetiminfoNullArgs() throws Throwable {
+ String[] gr_name = {""};
+ int[] dim_sizes = {0, 0};
+ HDFLibrary.GRgetiminfo(0, gr_name, null, dim_sizes);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testGRgetiminfoArgumentArgs() throws Throwable {
+ String[] gr_name = {""};
+ int[] args = {0};
+ int[] dim_sizes = {0, 0};
+ HDFLibrary.GRgetiminfo(0, gr_name, args, dim_sizes);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testGRgetiminfoNullDims() throws Throwable {
+ String[] gr_name = {""};
+ int[] args = {0, 0, 0, 0};
+ HDFLibrary.GRgetiminfo(0, gr_name, args, null);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testGRgetiminfoArgumentDims() throws Throwable {
+ String[] gr_name = {""};
+ int[] args = {0, 0, 0, 0};
+ int[] dim_sizes = {0};
+ HDFLibrary.GRgetiminfo(0, gr_name, args, dim_sizes);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testGRreadimageIllegalId() throws Throwable {
+ int[] start = {0, 0};
+ int[] stride = {0, 0};
+ int[] count = {0, 0};
+ byte[] data = {0};
+ HDFLibrary.GRreadimage(-1, start, stride, count, data);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testGRreadimageNullData() throws Throwable {
+ int[] start = {0, 0};
+ int[] stride = {0, 0};
+ int[] count = {0, 0};
+ HDFLibrary.GRreadimage(0, start, stride, count, null);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testGRreadimageNullStart() throws Throwable {
+ int[] stride = {0, 0};
+ int[] count = {0, 0};
+ byte[] data = {0};
+ HDFLibrary.GRreadimage(0, null, stride, count, data);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testGRreadimageNullCount() throws Throwable {
+ int[] start = {0, 0};
+ int[] stride = {0, 0};
+ byte[] data = {0};
+ HDFLibrary.GRreadimage(0, start, stride, null, data);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testGRendaccessIllegalId() throws Throwable {
+ HDFLibrary.GRendaccess(-1);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testGRidtorefIllegalId() throws Throwable {
+ HDFLibrary.GRidtoref(-1);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testGRreftoindexIllegalId() throws Throwable {
+ short ref = 0;
+ HDFLibrary.GRreftoindex(-1, ref);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testGRreqlutilIllegalId() throws Throwable {
+ int interlace = 0;
+ HDFLibrary.GRreqlutil(-1, interlace);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testGRreqimageilIllegalId() throws Throwable {
+ int interlace = 0;
+ HDFLibrary.GRreqimageil(-1, interlace);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testGRgetlutidIllegalId() throws Throwable {
+ int index = 0;
+ HDFLibrary.GRgetlutid(-1, index);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testGRgetnlutsIllegalId() throws Throwable {
+ HDFLibrary.GRgetnluts(-1);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testGRgetlutinfoIllegalId() throws Throwable {
+ int[] args = {0, 0, 0, 0};
+ HDFLibrary.GRgetlutinfo(-1, args);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testGRgetlutinfoNull() throws Throwable {
+ HDFLibrary.GRgetlutinfo(0, null);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testGRgetlutinfoArgument() throws Throwable {
+ int[] args = {0};
+ HDFLibrary.GRgetlutinfo(0, args);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testGRreadlutIllegalId() throws Throwable {
+ byte[] data = {0};
+ HDFLibrary.GRreadlut(-1, data);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testGRreadlutNull() throws Throwable {
+ HDFLibrary.GRreadlut(0, null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testGRattrinfoIllegalId() throws Throwable {
+ String[] name = {""};
+ int index = 0;
+ int[] argv = {0, 0};
+ HDFLibrary.GRattrinfo(-1, index, name, argv);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testGRattrinfoNullName() throws Throwable {
+ int index = 0;
+ int[] argv = {0, 0};
+ HDFLibrary.GRattrinfo(0, index, null, argv);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testGRattrinfoNullArgs() throws Throwable {
+ String[] name = {""};
+ int index = 0;
+ HDFLibrary.GRattrinfo(0, index, name, null);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testGRattrinfoArgumentArgs() throws Throwable {
+ String[] name = {""};
+ int index = 0;
+ int[] argv = {0};
+ HDFLibrary.GRattrinfo(0, index, name, argv);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testGRgetattrIllegalId() throws Throwable {
+ int index = 0;
+ byte[] data = {0};
+ HDFLibrary.GRgetattr(-1, index, data);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testGRgetattrNull() throws Throwable {
+ int index = 0;
+ HDFLibrary.GRgetattr(0, index, null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testGRfindattrIllegalId() throws Throwable {
+ String str = "";
+ HDFLibrary.GRfindattr(-1, str);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testGRfindattrNull() throws Throwable {
+ HDFLibrary.GRfindattr(0, null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testGRcreateIllegalId() throws Throwable {
+ String name = "";
+ int ncomp = 0;
+ long data_type = 0;
+ int interlace_mode = 0;
+ int[] dim_sizes = {0, 0};
+ HDFLibrary.GRcreate(-1, name, ncomp, data_type, interlace_mode, dim_sizes);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testGRcreateNullName() throws Throwable {
+ int ncomp = 0;
+ long data_type = 0;
+ int interlace_mode = 0;
+ int[] dim_sizes = {0, 0};
+ HDFLibrary.GRcreate(0, null, ncomp, data_type, interlace_mode, dim_sizes);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testGRcreateNullDims() throws Throwable {
+ String name = "";
+ int ncomp = 0;
+ long data_type = 0;
+ int interlace_mode = 0;
+ HDFLibrary.GRcreate(0, name, ncomp, data_type, interlace_mode, null);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testGRcreateArgumentDims() throws Throwable {
+ String name = "";
+ int ncomp = 0;
+ long data_type = 0;
+ int interlace_mode = 0;
+ int[] dim_sizes = {0};
+ HDFLibrary.GRcreate(0, name, ncomp, data_type, interlace_mode, dim_sizes);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testGRluttorefIllegalId() throws Throwable {
+ HDFLibrary.GRluttoref(-1);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testGRsetattrStrIllegalId() throws Throwable {
+ String name = "";
+ long data_type = 0;
+ int count = 0;
+ String val = "";
+ HDFLibrary.GRsetattr(-1, name, data_type, count, val);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testGRsetattrStrNullName() throws Throwable {
+ long data_type = 0;
+ int count = 0;
+ String val = "";
+ HDFLibrary.GRsetattr(0, null, data_type, count, val);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testGRsetattrStrNullVals() throws Throwable {
+ String name = "";
+ long data_type = 0;
+ int count = 0;
+ String val = null;
+ HDFLibrary.GRsetattr(0, name, data_type, count, val);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testGRsetattrIllegalId() throws Throwable {
+ String name = "";
+ long data_type = 0;
+ int count = 0;
+ byte[] data = {0};
+ HDFLibrary.GRsetattr(-1, name, data_type, count, data);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testGRsetattrNullName() throws Throwable {
+ long data_type = 0;
+ int count = 0;
+ byte[] data = {0};
+ HDFLibrary.GRsetattr(0, null, data_type, count, data);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testGRsetattrNullVals() throws Throwable {
+ String name = "";
+ long data_type = 0;
+ int count = 0;
+ byte[] data = null;
+ HDFLibrary.GRsetattr(0, name, data_type, count, data);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testGRsetchunkIllegalId() throws Throwable {
+ int flags = 0;
+ HDFChunkInfo chunk_def = new HDFChunkInfo();
+ HDFLibrary.GRsetchunk(-1, chunk_def, flags);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testGRsetchunkNull() throws Throwable {
+ int flags = 0;
+ HDFChunkInfo chunk_def = null;
+ HDFLibrary.GRsetchunk(0, null, flags);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testGRsetchunkcacheIllegalId() throws Throwable {
+ int maxcache = 0;
+ int flags = 0;
+ HDFLibrary.GRsetchunkcache(-1, maxcache, flags);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testGRsetcompressIllegalId() throws Throwable {
+ int comp_type = 0;
+ HDFCompInfo comp_info = new HDFCompInfo();
+ HDFLibrary.GRsetcompress(-1, comp_type, comp_info);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testGRsetcompressNull() throws Throwable {
+ int comp_type = 0;
+ HDFLibrary.GRsetcompress(0, comp_type, null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testGRgetcompressIllegalId() throws Throwable {
+ HDFCompInfo comp_info = new HDFCompInfo();
+ HDFLibrary.GRgetcompress(-1, comp_info);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testGRgetcompressNull() throws Throwable {
+ HDFLibrary.GRgetcompress(0, null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testGRsetexternalfileIllegalId() throws Throwable {
+ String str = "";
+ int offset = 0;
+ HDFLibrary.GRsetexternalfile(-1, str, offset);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testGRsetexternalfileNull() throws Throwable {
+ int offset = 0;
+ HDFLibrary.GRsetexternalfile(0, null, offset);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testGRwriteimageIllegalId() throws Throwable {
+ int[] start = {0, 0};
+ int[] stride = {0, 0};
+ int[] count = {0, 0};
+ byte[] data = {0};
+ HDFLibrary.GRwriteimage(-1, start, stride, count, data);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testGRwriteimageNullData() throws Throwable {
+ int[] start = {0, 0};
+ int[] stride = {0, 0};
+ int[] count = {0, 0};
+ HDFLibrary.GRwriteimage(0, start, stride, count, null);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testGRwriteimageNullStart() throws Throwable {
+ int[] stride = {0, 0};
+ int[] count = {0, 0};
+ byte[] data = {0};
+ HDFLibrary.GRwriteimage(0, null, stride, count, data);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testGRwriteimageNullCount() throws Throwable {
+ int[] start = {0, 0};
+ int[] stride = {0, 0};
+ byte[] data = {0};
+ HDFLibrary.GRwriteimage(0, start, stride, null, data);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testGRwritelutIllegalId() throws Throwable {
+ int ncomp = 0;
+ int data_type = 0;
+ int interlace_mode = 0;
+ int num_entries = 0;
+ byte[] data = {0};
+ HDFLibrary.GRwritelut(-1, ncomp, data_type, interlace_mode, num_entries, data);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testGRwritelutNull() throws Throwable {
+ int ncomp = 0;
+ int data_type = 0;
+ int interlace_mode = 0;
+ int num_entries = 0;
+ HDFLibrary.GRwritelut(0, ncomp, data_type, interlace_mode, num_entries, null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testGRreadchunkIllegalId() throws Throwable {
+ int[] args = {0, 0};
+ byte[] data = {0};
+ HDFLibrary.GRreadchunk(-1, args, data);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testGRreadchunkNull() throws Throwable {
+ int[] args = {0, 0};
+ HDFLibrary.GRreadchunk(0, args, null);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testGRreadchunkArgument() throws Throwable {
+ int[] args = {0};
+ byte[] data = {0};
+ HDFLibrary.GRreadchunk(0, args, data);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testGRreadchunkNullArgument() throws Throwable {
+ byte[] data = {0};
+ HDFLibrary.GRreadchunk(0, null, data);
+ }
+}
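
The GR tests above only pass illegal or null arguments. A minimal sketch of the normal GR read loop follows, assuming long identifiers and that GRgetiminfo fills {ncomp, data type, interlace, nattrs} as in the C API; the file name is a placeholder.

    import hdf.hdflib.HDFConstants;
    import hdf.hdflib.HDFException;
    import hdf.hdflib.HDFLibrary;

    public class GRReadSketch {
        public static void main(String[] args) throws HDFException {
            long fid = HDFLibrary.Hopen("images.hdf", HDFConstants.DFACC_READ);
            long grid = HDFLibrary.GRstart(fid);            // open the GR interface

            int[] fileInfo = {0, 0};                        // {number of images, number of file attributes}
            HDFLibrary.GRfileinfo(grid, fileInfo);

            for (int i = 0; i < fileInfo[0]; i++) {
                long riid = HDFLibrary.GRselect(grid, i);   // select the i-th raster image
                String[] name = {""};
                int[] info = {0, 0, 0, 0};                  // assumed {ncomp, data type, interlace, nattrs}
                int[] dims = {0, 0};
                HDFLibrary.GRgetiminfo(riid, name, info, dims);
                System.out.println(name[0] + ": " + dims[0] + "x" + dims[1]);
                HDFLibrary.GRendaccess(riid);
            }

            HDFLibrary.GRend(grid);
            HDFLibrary.Hclose(fid);
        }
    }
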
diff --git a/java/test/TestH4HCparams.java b/java/test/TestH4HCparams.java
new file mode 100644
index 0000000..7ef286f
--- /dev/null
+++ b/java/test/TestH4HCparams.java
@@ -0,0 +1,52 @@
+
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Products. The full HDF copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+
+import hdf.hdflib.HDFLibrary;
+import hdf.hdflib.HDFException;
+import hdf.hdflib.HDFChunkInfo;
+import hdf.hdflib.HDFConstants;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH4HCparams {
+ @Rule public TestName testname = new TestName();
+
+ @Before
+ public void showTestName() {
+ System.out.print(testname.getMethodName());
+ }
+
+ @After
+ public void nextTestName() {
+ System.out.println();
+ }
+
+ @Test//(expected = HDFException.class)
+ public void testHCget_config_infoIllegalCoderType() throws Throwable {
+ //HDFLibrary.HCget_config_info(HDFConstants.COMP_CODE_INVALID);
+ }
+}
diff --git a/java/test/TestH4SDparams.java b/java/test/TestH4SDparams.java
new file mode 100644
index 0000000..d19b449
--- /dev/null
+++ b/java/test/TestH4SDparams.java
@@ -0,0 +1,746 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Products. The full HDF copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+
+import hdf.hdflib.HDFLibrary;
+import hdf.hdflib.HDFException;
+import hdf.hdflib.HDFChunkInfo;
+import hdf.hdflib.HDFCompInfo;
+import hdf.hdflib.HDFConstants;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+/**
+ * Parameter-validation tests for the HDFLibrary SD (scientific data set) interface.
+ */
+public class TestH4SDparams {
+ @Rule public TestName testname = new TestName();
+
+ @Before
+ public void showTestName() {
+ System.out.print(testname.getMethodName());
+ }
+
+ @After
+ public void nextTestName() {
+ System.out.println();
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDstartNull() throws Throwable {
+ HDFLibrary.SDstart(null, 0);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDendIllegalId() throws Throwable {
+ HDFLibrary.SDend(-1);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDfileinfoIllegalId() throws Throwable {
+ int[] args = {0, 0};
+ HDFLibrary.SDfileinfo(-1, args);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDfileinfoNull() throws Throwable {
+ HDFLibrary.SDfileinfo(0, null);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testSDfileinfoArgument() throws Throwable {
+ int[] args = {0};
+ HDFLibrary.SDfileinfo(0, args);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDselectIllegalId() throws Throwable {
+ HDFLibrary.SDselect(-1, 0);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDnametoindexIllegalId() throws Throwable {
+ HDFLibrary.SDnametoindex(-1, "");
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDnametoindexNull() throws Throwable {
+ HDFLibrary.SDnametoindex(0, null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDgetinfoIllegalId() throws Throwable {
+ String[] name = {""};
+ int[] args = {0, 0, 0};
+ int[] dim_sizes = {0, 0};
+ HDFLibrary.SDgetinfo(-1, name, dim_sizes, args);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDgetinfoNullName() throws Throwable {
+ int[] args = {0, 0, 0};
+ int[] dim_sizes = {0, 0};
+ HDFLibrary.SDgetinfo(0, null, dim_sizes, args);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDgetinfoNullArgs() throws Throwable {
+ String[] name = {""};
+ int[] dim_sizes = {0, 0};
+ HDFLibrary.SDgetinfo(0, name, dim_sizes, null);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testSDgetinfoArgumentArgs() throws Throwable {
+ String[] name = {""};
+ int[] args = {0};
+ int[] dim_sizes = {0, 0};
+ HDFLibrary.SDgetinfo(0, name, dim_sizes, args);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDgetinfoNullDims() throws Throwable {
+ String[] name = {""};
+ int[] args = {0, 0, 0};
+ HDFLibrary.SDgetinfo(0, name, null, args);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDcheckemptyIllegalId() throws Throwable {
+ int[] emptySDS = {0};
+ HDFLibrary.SDcheckempty(-1, emptySDS);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDcheckemptyNull() throws Throwable {
+ HDFLibrary.SDcheckempty(0, null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDreaddataIllegalId() throws Throwable {
+ int[] start = {0, 0};
+ int[] stride = {0, 0};
+ int[] count = {0, 0};
+ byte[] data = {0};
+ HDFLibrary.SDreaddata(-1, start, stride, count, data);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDreaddataNullData() throws Throwable {
+ int[] start = {0, 0};
+ int[] stride = {0, 0};
+ int[] count = {0, 0};
+ HDFLibrary.SDreaddata(0, start, stride, count, null);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDreaddataNullStart() throws Throwable {
+ int[] stride = {0, 0};
+ int[] count = {0, 0};
+ byte[] data = {0};
+ HDFLibrary.SDreaddata(0, null, stride, count, data);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDreaddataNullCount() throws Throwable {
+ int[] start = {0, 0};
+ int[] stride = {0, 0};
+ byte[] data = {0};
+ HDFLibrary.SDreaddata(0, start, stride, null, data);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDreaddata_shortIllegalId() throws Throwable {
+ int[] start = {0, 0};
+ int[] stride = {0, 0};
+ int[] count = {0, 0};
+ short[] data = {0};
+ HDFLibrary.SDreaddata_short(-1, start, stride, count, data);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDreaddata_intIllegalId() throws Throwable {
+ int[] start = {0, 0};
+ int[] stride = {0, 0};
+ int[] count = {0, 0};
+ int[] data = {0};
+ HDFLibrary.SDreaddata_int(-1, start, stride, count, data);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDreaddata_longIllegalId() throws Throwable {
+ int[] start = {0, 0};
+ int[] stride = {0, 0};
+ int[] count = {0, 0};
+ long[] data = {0};
+ HDFLibrary.SDreaddata_long(-1, start, stride, count, data);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDreaddata_floatIllegalId() throws Throwable {
+ int[] start = {0, 0};
+ int[] stride = {0, 0};
+ int[] count = {0, 0};
+ float[] data = {0};
+ HDFLibrary.SDreaddata_float(-1, start, stride, count, data);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDreaddata_doubleIllegalId() throws Throwable {
+ int[] start = {0, 0};
+ int[] stride = {0, 0};
+ int[] count = {0, 0};
+ double[] data = {0};
+ HDFLibrary.SDreaddata_double(-1, start, stride, count, data);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDendaccessIllegalId() throws Throwable {
+ HDFLibrary.SDendaccess(-1);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDgetdimidIllegalId() throws Throwable {
+ HDFLibrary.SDgetdimid(-1, 0);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDdiminfoIllegalId() throws Throwable {
+ String[] name = {""};
+ int[] argv = {0, 0, 0};
+ HDFLibrary.SDdiminfo(-1, name, argv);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDdiminfoNullName() throws Throwable {
+ int[] argv = {0, 0, 0};
+ HDFLibrary.SDdiminfo(0, null, argv);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDdiminfoNullArgs() throws Throwable {
+ String[] name = {""};
+ HDFLibrary.SDdiminfo(0, name, null);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testSDdiminfoArgumentArgs() throws Throwable {
+ String[] name = {""};
+ int[] argv = {0};
+ HDFLibrary.SDdiminfo(0, name, argv);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDidtorefIllegalId() throws Throwable {
+ HDFLibrary.SDidtoref(-1);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDreftoindexIllegalId() throws Throwable {
+ short ref = 0;
+ HDFLibrary.SDreftoindex(-1, ref);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDattrinfoIllegalId() throws Throwable {
+ String[] name = {""};
+ int index = 0;
+ int[] argv = {0, 0};
+ HDFLibrary.SDattrinfo(-1, index, name, argv);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDattrinfoNullName() throws Throwable {
+ int index = 0;
+ int[] argv = {0, 0};
+ HDFLibrary.SDattrinfo(0, index, null, argv);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDattrinfoNullArgs() throws Throwable {
+ String[] name = {""};
+ int index = 0;
+ HDFLibrary.SDattrinfo(0, index, name, null);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testSDattrinfoArgumentArgs() throws Throwable {
+ String[] name = {""};
+ int index = 0;
+ int[] argv = {0};
+ HDFLibrary.SDattrinfo(0, index, name, argv);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDreadattrIllegalId() throws Throwable {
+ int index = 0;
+ byte[] data = {0};
+ HDFLibrary.SDreadattr(-1, index, data);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDreadattrNull() throws Throwable {
+ int index = 0;
+ HDFLibrary.SDreadattr(0, index, null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDfindattrIllegalId() throws Throwable {
+ String str = "";
+ HDFLibrary.SDfindattr(-1, str);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDfindattrNull() throws Throwable {
+ HDFLibrary.SDfindattr(0, null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDiscoordvarIllegalId() throws Throwable {
+ HDFLibrary.SDiscoordvar(-1);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDgetcalIllegalId() throws Throwable {
+ double[] args = {0, 0, 0, 0};
+ int[] NT = {0};
+ HDFLibrary.SDgetcal(-1, args, NT);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDgetcalNull() throws Throwable {
+ double[] args = {0, 0, 0, 0};
+ HDFLibrary.SDgetcal(0, args, null);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testSDgetcalArgument() throws Throwable {
+ double[] args = {0};
+ int[] NT = {0};
+ HDFLibrary.SDgetcal(0, args, NT);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDgetcalNullArgument() throws Throwable {
+ int[] NT = {0};
+ HDFLibrary.SDgetcal(0, null, NT);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDgetdatastrsIllegalId() throws Throwable {
+ String[] str = {"", "", "", ""};
+ int len = 0;
+ HDFLibrary.SDgetdatastrs(-1, str, len);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDgetdatastrsNull() throws Throwable {
+ int len = 0;
+ HDFLibrary.SDgetdatastrs(0, null, len);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testSDgetdatastrsArgument() throws Throwable {
+ String[] str = {""};
+ int len = 0;
+ HDFLibrary.SDgetdatastrs(-1, str, len);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDgetdimstrsIllegalId() throws Throwable {
+ String[] str = {"", "", "", ""};
+ int len = 0;
+ HDFLibrary.SDgetdimstrs(-1, str, len);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDgetdimstrsNull() throws Throwable {
+ int len = 0;
+ HDFLibrary.SDgetdimstrs(0, null, len);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testSDgetdimstrsArgument() throws Throwable {
+ String[] str = {""};
+ int len = 0;
+ HDFLibrary.SDgetdimstrs(-1, str, len);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDgetdimscaleIllegalId() throws Throwable {
+ byte[] data = {0};
+ HDFLibrary.SDgetdimscale(-1, data);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDgetdimscaleNull() throws Throwable {
+ HDFLibrary.SDgetdimscale(0, null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDgetfillvalueIllegalId() throws Throwable {
+ byte[] data = {0};
+ HDFLibrary.SDgetfillvalue(-1, data);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDgetfillvalueNull() throws Throwable {
+ byte[] data = null;
+ HDFLibrary.SDgetfillvalue(0, data);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDgetrangeIllegalId() throws Throwable {
+ byte[] min = {0};
+ byte[] max = {0};
+ HDFLibrary.SDgetrange(-1, max, min);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDgetrangeNullMin() throws Throwable {
+ byte[] min = null;
+ byte[] max = {0};
+ HDFLibrary.SDgetrange(0, max, min);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDgetrangeNullMax() throws Throwable {
+ byte[] min = {0};
+ byte[] max = null;
+ HDFLibrary.SDgetrange(0, max, min);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDcreateIllegalId() throws Throwable {
+ String name = "";
+ long num_type = 0;
+ int rank = 0;
+ int[] dim_sizes = {0};
+ HDFLibrary.SDcreate(-1, name, num_type, rank, dim_sizes);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDcreateNullName() throws Throwable {
+ long num_type = 0;
+ int rank = 0;
+ int[] dim_sizes = {0, 0};
+ HDFLibrary.SDcreate(0, null, num_type, rank, dim_sizes);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDcreateNullDims() throws Throwable {
+ String name = "";
+ long num_type = 0;
+ int rank = 0;
+ HDFLibrary.SDcreate(0, name, num_type, rank, null);
+ }
+
+ // this API call returns false both for a non-record SDS and for failures;
+ // perhaps failures should return a negative value instead?
+ @Ignore//(expected = HDFException.class)
+ public void testSDisrecordIllegalId() throws Throwable {
+ HDFLibrary.SDisrecord(-1);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDsetattrIllegalId() throws Throwable {
+ String name = "";
+ long num_type = 0;
+ int count = 0;
+ byte[] data = {0};
+ HDFLibrary.SDsetattr(-1, name, num_type, count, data);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDsetattrNullName() throws Throwable {
+ long num_type = 0;
+ int count = 0;
+ byte[] data = {0};
+ HDFLibrary.SDsetattr(0, null, num_type, count, data);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDsetattrNullVals() throws Throwable {
+ String name = "";
+ long num_type = 0;
+ int count = 0;
+ byte[] data = null;
+ HDFLibrary.SDsetattr(0, name, num_type, count, data);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDsetcalIllegalId() throws Throwable {
+ double val = 0;
+ int num_type = 0;
+ HDFLibrary.SDsetcal(-1, val, val, val, val, num_type);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDsetdatastrsIllegalId() throws Throwable {
+ String[] str = {"", "", "", ""};
+ HDFLibrary.SDsetdatastrs(-1, str[0], str[1], str[2], str[3]);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDsetdimnameIllegalId() throws Throwable {
+ String str = "";
+ HDFLibrary.SDsetdimname(-1, str);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDsetdimnameNull() throws Throwable {
+ String str = null;
+ HDFLibrary.SDsetdimname(-1, str);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDsetdimscaleIllegalId() throws Throwable {
+ int num_type = 0;
+ int count = 0;
+ byte[] data = {0};
+ HDFLibrary.SDsetdimscale(-1, count, num_type, data);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDsetdimscaleNull() throws Throwable {
+ int num_type = 0;
+ int count = 0;
+ byte[] data = null;
+ HDFLibrary.SDsetdimscale(0, count, num_type, data);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDsetdimstrsIllegalId() throws Throwable {
+ String[] str = {"", "", ""};
+ HDFLibrary.SDsetdimstrs(-1, str[0], str[1], str[2]);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDsetexternalfileIllegalId() throws Throwable {
+ String str = "";
+ int offset = 0;
+ HDFLibrary.SDsetexternalfile(-1, str, offset);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDsetexternalfileNull() throws Throwable {
+ String str = null;
+ int offset = 0;
+ HDFLibrary.SDsetexternalfile(-1, str, offset);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDsetfillvalueIllegalId() throws Throwable {
+ byte[] data = {0};
+ HDFLibrary.SDsetfillvalue(-1, data);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDsetfillvalueNull() throws Throwable {
+ byte[] data = null;
+ HDFLibrary.SDsetfillvalue(0, data);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDsetrangeIllegalId() throws Throwable {
+ byte[] min = {0};
+ byte[] max = {0};
+ HDFLibrary.SDsetrange(-1, max, min);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDsetrangeNullMin() throws Throwable {
+ byte[] min = null;
+ byte[] max = {0};
+ HDFLibrary.SDsetrange(0, max, min);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDsetrangeNullMax() throws Throwable {
+ byte[] min = {0};
+ byte[] max = null;
+ HDFLibrary.SDsetrange(0, max, min);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDwritedataIllegalId() throws Throwable {
+ int[] start = {0, 0};
+ int[] stride = {0, 0};
+ int[] count = {0, 0};
+ byte[] data = {0};
+ HDFLibrary.SDwritedata(-1, start, stride, count, data);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDwritedataNullData() throws Throwable {
+ int[] start = {0, 0};
+ int[] stride = {0, 0};
+ int[] count = {0, 0};
+ HDFLibrary.SDwritedata(0, start, stride, count, null);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDwritedataNullStart() throws Throwable {
+ int[] stride = {0, 0};
+ int[] count = {0, 0};
+ byte[] data = {0};
+ HDFLibrary.SDwritedata(0, null, stride, count, data);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDwritedataNullCount() throws Throwable {
+ int[] start = {0, 0};
+ int[] stride = {0, 0};
+ byte[] data = {0};
+ HDFLibrary.SDwritedata(0, start, stride, null, data);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDsetnbitdatasetIllegalId() throws Throwable {
+ int start_bit = 0;
+ int bit_len = 0;
+ int sign_ext = 0;
+ int fill_one = 0;
+ HDFLibrary.SDsetnbitdataset(-1, start_bit, bit_len, sign_ext, fill_one);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDsetcompressIllegalId() throws Throwable {
+ int type = 0;
+ HDFCompInfo comp_info = new HDFCompInfo();
+ HDFLibrary.SDsetcompress(-1, type, comp_info);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDsetcompressNull() throws Throwable {
+ int type = 0;
+ HDFCompInfo comp_info = null;
+ HDFLibrary.SDsetcompress(0, type, null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDgetcompinfoIllegalId() throws Throwable {
+ HDFCompInfo comp_info = new HDFCompInfo();
+ HDFLibrary.SDgetcompinfo(-1, comp_info);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDgetcompinfoNull() throws Throwable {
+ HDFCompInfo comp_info = null;
+ HDFLibrary.SDgetcompinfo(0, null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDsetaccesstypeIllegalId() throws Throwable {
+ int index = 0;
+ HDFLibrary.SDsetaccesstype(-1, index);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDsetblocksizeIllegalId() throws Throwable {
+ int block_size = 0;
+ HDFLibrary.SDsetblocksize(-1, block_size);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDsetfillmodeIllegalId() throws Throwable {
+ boolean fill_enable = false;
+ HDFLibrary.SDsetfillmode(-1, fill_enable);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDsetdimval_compIllegalId() throws Throwable {
+ int comp_mode = 0;
+ HDFLibrary.SDsetdimval_comp(-1, comp_mode);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDisdimval_bwcompIllegalId() throws Throwable {
+ HDFLibrary.SDisdimval_bwcomp(-1);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDsetchunkIllegalId() throws Throwable {
+ int flags = 0;
+ HDFChunkInfo chunk_def = new HDFChunkInfo();
+ HDFLibrary.SDsetchunk(-1, chunk_def, flags);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDsetchunkNull() throws Throwable {
+ int flags = 0;
+ HDFChunkInfo chunk_def = null;
+ HDFLibrary.SDsetchunk(0, null, flags);
+ }
+
+ // The library routine does not reject an illegal ID cleanly - it simply crashes, so the test is ignored
+ @Ignore//(expected = HDFException.class)
+ public void testSDreadchunkIllegalId() throws Throwable {
+ int[] args = {0, 0};
+ byte[] data = {0};
+ HDFLibrary.SDreadchunk(-1, args, data);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDreadchunkNull() throws Throwable {
+ int[] args = {0, 0};
+ byte[] data = null;
+ HDFLibrary.SDreadchunk(0, args, data);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDreadchunkNullArgument() throws Throwable {
+ byte[] data = {0};
+ HDFLibrary.SDreadchunk(0, null, data);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testSDsetchunkcacheIllegalId() throws Throwable {
+ int maxcache = 0;
+ int flags = 0;
+ HDFLibrary.SDsetchunkcache(-1, maxcache, flags);
+ }
+
+ // The library routine does not reject an illegal ID cleanly - it simply crashes, so the test is ignored
+ @Ignore//(expected = HDFException.class)
+ public void testSDwritechunkIllegalId() throws Throwable {
+ int[] args = {0, 0};
+ byte[] data = {0};
+ HDFLibrary.SDwritechunk(-1, args, data);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDwritechunkNull() throws Throwable {
+ int[] args = {0, 0};
+ byte[] data = null;
+ HDFLibrary.SDwritechunk(0, args, data);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testSDwritechunkNullArgument() throws Throwable {
+ byte[] data = {0};
+ HDFLibrary.SDwritechunk(0, null, data);
+ }
+}
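
The SD tests likewise stop at the parameter checks. A rough sketch of the usual SD inquiry loop follows, assuming long identifiers and that SDgetinfo fills {rank, number type, attribute count} as in the C API; the file name is a placeholder.

    import hdf.hdflib.HDFConstants;
    import hdf.hdflib.HDFException;
    import hdf.hdflib.HDFLibrary;

    public class SDReadSketch {
        public static void main(String[] args) throws HDFException {
            // SDstart opens the file directly; no Hopen is needed for the SD interface.
            long sdid = HDFLibrary.SDstart("data.hdf", HDFConstants.DFACC_READ);

            int[] fileInfo = {0, 0};                       // {number of datasets, number of global attributes}
            HDFLibrary.SDfileinfo(sdid, fileInfo);

            for (int i = 0; i < fileInfo[0]; i++) {
                long sdsid = HDFLibrary.SDselect(sdid, i); // select the i-th dataset
                String[] name = {""};
                int[] dimSizes = new int[32];              // 32 covers MAX_VAR_DIMS in the C library
                int[] info = {0, 0, 0};                    // assumed {rank, number type, attribute count}
                HDFLibrary.SDgetinfo(sdsid, name, dimSizes, info);
                System.out.println(name[0] + ": rank " + info[0]);
                HDFLibrary.SDendaccess(sdsid);
            }

            HDFLibrary.SDend(sdid);
        }
    }
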
diff --git a/java/test/TestH4VSparams.java b/java/test/TestH4VSparams.java
new file mode 100644
index 0000000..0a7193f
--- /dev/null
+++ b/java/test/TestH4VSparams.java
@@ -0,0 +1,476 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Products. The full HDF copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+
+import hdf.hdflib.HDFLibrary;
+import hdf.hdflib.HDFException;
+import hdf.hdflib.HDFChunkInfo;
+import hdf.hdflib.HDFConstants;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH4VSparams {
+ @Rule public TestName testname = new TestName();
+
+ @Before
+ public void showTestName() {
+ System.out.print(testname.getMethodName());
+ }
+
+ @After
+ public void nextTestName() {
+ System.out.println();
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testVSQuerycountNullNRecords() throws Throwable {
+ HDFLibrary.VSQuerycount(0, null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVSQuerycountIllegalId() throws Throwable {
+ HDFLibrary.VSQuerycount(-1, new int[] { 0 });
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testVSQuerycountIllegalArgument() throws Throwable {
+ HDFLibrary.VSQuerycount(0, new int[] { });
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testVSQueryfieldsNullFields() throws Throwable {
+ HDFLibrary.VSQueryfields(0, null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVSQueryfieldsIllegalId() throws Throwable {
+ HDFLibrary.VSQueryfields(-1, new String[1]);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testVSQueryfieldsIllegalArgument() throws Throwable {
+ HDFLibrary.VSQueryfields(0, new String[0]);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testVSQueryinterlaceNullInterlace() throws Throwable {
+ HDFLibrary.VSQueryinterlace(0, null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVSQueryinterlaceIllegalId() throws Throwable {
+ HDFLibrary.VSQueryinterlace(-1, new int[1]);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testVSQueryinterlaceIllegalArgument() throws Throwable {
+ HDFLibrary.VSQueryinterlace(0, new int[0]);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testVSQuerynameNullVDataName() throws Throwable {
+ HDFLibrary.VSQueryname(0, null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVSQuerynameIllegalId() throws Throwable {
+ HDFLibrary.VSQueryname(-1, new String[1]);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testVSQuerynameIllegalArgument() throws Throwable {
+ HDFLibrary.VSQueryname(0, new String[0]);
+ }
+
+// @Test(expected = HDFException.class)
+// public void testVSQueryrefIllegalId() throws Throwable {
+// HDFLibrary.VSQueryref(-1);
+// }
+
+// @Test(expected = HDFException.class)
+// public void testVSQuerytagIllegalId() throws Throwable {
+// HDFLibrary.VSQuerytag(-1);
+// }
+
+ @Test(expected = NullPointerException.class)
+ public void testVSQueryvsizeNullVDataSize() throws Throwable {
+ HDFLibrary.VSQueryvsize(0, null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVSQueryvsizeIllegalId() throws Throwable {
+ HDFLibrary.VSQueryvsize(-1, new int[1]);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testVSQueryvsizeIllegalArgument() throws Throwable {
+ HDFLibrary.VSQueryvsize(0, new int[0]);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testVSattachNullAccess() throws Throwable {
+ HDFLibrary.VSattach(0, 0, null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVSattachIllegalId() throws Throwable {
+ HDFLibrary.VSattach(-1, 0, "w");
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVSattachIllegalReg() throws Throwable {
+ HDFLibrary.VSattach(0, -1, "w");
+ }
+
+// @Test(expected = HDFException.class)
+// public void testVSdetachIllegalId() throws Throwable {
+// HDFLibrary.VSdetach(-1);
+// }
+
+// @Test(expected = HDFException.class)
+// public void testVSgetidIllegalId() throws Throwable {
+// HDFLibrary.VSgetid(-1, 0);
+// }
+
+// @Test(expected = HDFException.class)
+// public void testVSgetidIllegalRef() throws Throwable {
+// HDFLibrary.VSgetid(0, -1);
+// }
+
+ @Test(expected = NullPointerException.class)
+ public void testVSgetclassNullClassName() throws Throwable {
+ HDFLibrary.VSgetclass(0, null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVSgetclassIllegalId() throws Throwable {
+ HDFLibrary.VSgetclass(-1, new String[1]);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testVSgetnameNullName() throws Throwable {
+ HDFLibrary.VSgetname(0, null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVSgetnameIllegalId() throws Throwable {
+ HDFLibrary.VSgetname(-1, new String[1]);
+ }
+
+// @Test(expected = HDFException.class)
+// public void testVSeltsIllegalId() throws Throwable {
+// HDFLibrary.VSelts(-1);
+// }
+
+ @Test(expected = NullPointerException.class)
+ public void testVSfdefineNullFieldName() throws Throwable {
+ HDFLibrary.VSfdefine(0, null, 0, 0);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVSfdefineIllegalId() throws Throwable {
+ HDFLibrary.VSfdefine(-1, "", 0, 0);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testVSfexistNullFields() throws Throwable {
+ HDFLibrary.VSfexist(0, null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVSfexistIllegalId() throws Throwable {
+ HDFLibrary.VSfexist(-1, "");
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testVSfindNullVDataName() throws Throwable {
+ HDFLibrary.VSfind(0, null);
+ }
+
+// @Test(expected = HDFException.class)
+// public void testVSfindIllegalId() throws Throwable {
+// HDFLibrary.VSfind(-1, "");
+// }
+
+ @Test(expected = HDFException.class)
+ public void testVSsetblocksizeIllegalId() throws Throwable {
+ HDFLibrary.VSsetblocksize(-1, 0);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVSsetnumblocksIllegalId() throws Throwable {
+ HDFLibrary.VSsetnumblocks(-1, 0);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testVSgetfieldsNullFieldName() throws Throwable {
+ HDFLibrary.VSgetfields(0, null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVSgetfieldsIllegalId() throws Throwable {
+ HDFLibrary.VSgetfields(-1, new String[1]);
+ }
+
+// @Test(expected = HDFException.class)
+// public void testVSgetinterlaceIllegalId() throws Throwable {
+// HDFLibrary.VSgetinterlace(-1);
+// }
+
+ @Test(expected = NullPointerException.class)
+ public void testVSinquireNullIArgs() throws Throwable {
+ HDFLibrary.VSinquire(0, null, new String[2]);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testVSinquireNullSArgs() throws Throwable {
+ HDFLibrary.VSinquire(0, new int[3], null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVSinquireIllegalId() throws Throwable {
+ HDFLibrary.VSinquire(-1, new int[3], new String[2]);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testVSinquireIllegalIArgs() throws Throwable {
+ HDFLibrary.VSinquire(0, new int[1], new String[2]);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testVSinquireIllegalSArgs() throws Throwable {
+ HDFLibrary.VSinquire(0, new int[3], new String[1]);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testVSloneNullRefArray() throws Throwable {
+ HDFLibrary.VSlone(0, null, 0);
+ }
+
+// @Test(expected = HDFException.class)
+// public void testVSloneIllegalId() throws Throwable {
+// HDFLibrary.VSlone(-1, new int[10], 0);
+// }
+
+ @Test(expected = NullPointerException.class)
+ public void testVSreadNullDataBuffer() throws Throwable {
+ HDFLibrary.VSread(0, null, 0, 0);
+ }
+
+// @Test(expected = HDFException.class)
+// public void testVSreadIllegalId() throws Throwable {
+// HDFLibrary.VSread(-1, new byte[] { }, 0, 0);
+// }
+
+// @Test(expected = HDFException.class)
+// public void testVSseekIllegalId() throws Throwable {
+// HDFLibrary.VSseek(-1, 0);
+// }
+
+ @Test(expected = NullPointerException.class)
+ public void testVSsetfieldsNullFields() throws Throwable {
+ HDFLibrary.VSsetfields(0, null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVSsetfieldsIllegalId() throws Throwable {
+ HDFLibrary.VSsetfields(-1, "");
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVSsetinterlaceIllegalId() throws Throwable {
+ HDFLibrary.VSsetinterlace(-1, 0);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testVSsizeofNullFields() throws Throwable {
+ HDFLibrary.VSsizeof(0, null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVSsizeofIllegalId() throws Throwable {
+ HDFLibrary.VSsizeof(-1, "");
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVSappendableIllegalVKey() throws Throwable {
+ HDFLibrary.VSappendable(-1, 0);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testVSfindclassNullClass() throws Throwable {
+ HDFLibrary.VSfindclass(0, null);
+ }
+
+// @Test(expected = HDFException.class)
+// public void testVSfindclassIllegalId() throws Throwable {
+// HDFLibrary.VSfindclass(-1, "");
+// }
+
+// @Test(expected = HDFException.class)
+// public void testVSgetversionIllegalVKey() throws Throwable {
+// HDFLibrary.VSgetversion(-1);
+// }
+
+ @Test(expected = NullPointerException.class)
+ public void testVSsetclassNullClass() throws Throwable {
+ HDFLibrary.VSsetclass(0, null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVSsetclassIllegalId() throws Throwable {
+ HDFLibrary.VSsetclass(-1, "");
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testVSsetexternalfileNullFilename() throws Throwable {
+ HDFLibrary.VSsetexternalfile(0, null, 0);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVSsetexternalfileIllegalVKey() throws Throwable {
+ HDFLibrary.VSsetexternalfile(-1, "", 0);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testVSsetnameNullVDataName() throws Throwable {
+ HDFLibrary.VSsetname(0, null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVSsetnameIllegalId() throws Throwable {
+ HDFLibrary.VSsetname(-1, "");
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testVSwriteNullDataBuffer() throws Throwable {
+ HDFLibrary.VSwrite(0, null, 0, 0);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVSwriteIllegalId() throws Throwable {
+ HDFLibrary.VSwrite(-1, new byte[] { }, 0, 0);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testVSattrinfoNullName() throws Throwable {
+ HDFLibrary.VSattrinfo(0, 0, 0, null, new int[3]);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testVSattrinfoNullArgs() throws Throwable {
+ HDFLibrary.VSattrinfo(0, 0, 0, new String[1], null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVSattrinfoIllegalId() throws Throwable {
+ HDFLibrary.VSattrinfo(-1, 0, 0, new String[1], new int[3]);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testVSattrinfoIllegalNameArgument() throws Throwable {
+ HDFLibrary.VSattrinfo(0, 0, 0, new String[0], new int[3]);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testVSattrinfoIllegalAttributeArgs() throws Throwable {
+ HDFLibrary.VSattrinfo(0, 0, 0, new String[1], new int[1]);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testVSfindexNullName() throws Throwable {
+ HDFLibrary.VSfindex(0, null, new int[1]);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testVSfindexNullIndexArray() throws Throwable {
+ HDFLibrary.VSfindex(0, "", null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVSfindexIllegalId() throws Throwable {
+ HDFLibrary.VSfindex(-1, "", new int[1]);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testVSfindattrNullName() throws Throwable {
+ HDFLibrary.VSfindattr(0, 0, null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVSfindattrIllegalId() throws Throwable {
+ HDFLibrary.VSfindattr(-1, 0, "");
+ }
+
+// @Test(expected = HDFException.class)
+// public void testVSfnattrsIllegalId() throws Throwable {
+// HDFLibrary.VSfnattrs(-1, 0);
+// }
+
+ @Test(expected = NullPointerException.class)
+ public void testVSgetattrNullData() throws Throwable {
+ HDFLibrary.VSgetattr(0, 0, 0, null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVSgetattrIllegalId() throws Throwable {
+ HDFLibrary.VSgetattr(-1, 0, 0, new byte[] { });
+ }
+
+// @Test(expected = HDFException.class)
+// public void testVSisattrIllegalId() throws Throwable {
+// HDFLibrary.VSisattr(-1);
+// }
+
+// @Test(expected = HDFException.class)
+// public void testVSnattrsIllegalId() throws Throwable {
+// HDFLibrary.VSnattrs(-1);
+// }
+
+ @Test(expected = NullPointerException.class)
+ public void testVSsetattrNullAttributeName() throws Throwable {
+ HDFLibrary.VSsetattr(0, 0, null, 0, 0, new byte[] { });
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testVSsetattrNullAttributeName2() throws Throwable {
+ HDFLibrary.VSsetattr(0, 0, null, 0, 0, "");
+ }
+
+// @Test(expected = NullPointerException.class)
+// public void testVSsetattrNullValues() throws Throwable {
+// HDFLibrary.VSsetattr(0, 0, "", 0, 0, null);
+// }
+
+ @Test(expected = HDFException.class)
+ public void testVSsetattrIllegalId() throws Throwable {
+ HDFLibrary.VSsetattr(-1, 0, "", 0, 0, new byte[] { });
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVSsetattrIllegalId2() throws Throwable {
+ HDFLibrary.VSsetattr(-1, 0, "", 0, 0, "");
+ }
+}
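
As with the SD tests, everything in TestH4VSparams drives the VS wrappers down their error paths. A minimal read sequence through the same calls is sketched below; it is not part of the patch, and the Hopen/Hclose/Vstart/Vend plumbing is assumed to carry the usual HDF4 signatures.

    import hdf.hdflib.HDFConstants;
    import hdf.hdflib.HDFException;
    import hdf.hdflib.HDFLibrary;

    public class VSReadSketch {
        public static void main(String[] args) throws HDFException {
            int fid = HDFLibrary.Hopen("vdata.hdf", HDFConstants.DFACC_READ);
            HDFLibrary.Vstart(fid);

            int ref = HDFLibrary.VSfind(fid, "Temperature");   // 0 means no vdata by that name
            int vsid = HDFLibrary.VSattach(fid, ref, "r");

            int[] nrecords = new int[1];
            HDFLibrary.VSQuerycount(vsid, nrecords);           // same call the null/empty-array tests cover
            String[] fields = new String[1];
            HDFLibrary.VSQueryfields(vsid, fields);

            HDFLibrary.VSdetach(vsid);
            HDFLibrary.Vend(fid);
            HDFLibrary.Hclose(fid);
        }
    }
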
diff --git a/java/test/TestH4Vparams.java b/java/test/TestH4Vparams.java
new file mode 100644
index 0000000..2f2e2f1
--- /dev/null
+++ b/java/test/TestH4Vparams.java
@@ -0,0 +1,404 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF Products. The full HDF copyright *
+ * notice, including terms governing use, modification, and redistribution, *
+ * is contained in the file, COPYING. COPYING can be found at the root of *
+ * the source code distribution tree. You can also access it online at *
+ * http://www.hdfgroup.org/products/licenses.html. If you do not have *
+ * access to the file, you may request a copy from help at hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+
+import hdf.hdflib.HDFLibrary;
+import hdf.hdflib.HDFException;
+import hdf.hdflib.HDFChunkInfo;
+import hdf.hdflib.HDFConstants;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH4Vparams {
+ @Rule public TestName testname = new TestName();
+
+ @Before
+ public void showTestName() {
+ System.out.print(testname.getMethodName());
+ }
+
+ @After
+ public void nextTestName() {
+ System.out.println();
+ }
+
+// @Test(expected = HDFException.class)
+// public void testVQueryrefIllegalVKey() throws Throwable {
+// HDFLibrary.VQueryref(-1);
+// }
+
+// @Test(expected = HDFException.class)
+// public void testVQuerytagIllegalVKey() throws Throwable {
+// HDFLibrary.VQuerytag(-1);
+// }
+
+// @Test(expected = HDFException.class)
+// public void testVstartIllegalId() throws Throwable {
+// HDFLibrary.Vstart(-1);
+// }
+
+ @Test(expected = NullPointerException.class)
+ public void testVattachNullAccess() throws Throwable {
+ HDFLibrary.Vattach(0, 0, null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVattachIllegalId() throws Throwable {
+ String str = "";
+ HDFLibrary.Vattach(-1, 0, str);
+ }
+
+// @Test(expected = HDFException.class)
+// public void testVdetachIllegalId() throws Throwable {
+// HDFLibrary.Vdetach(-1);
+// }
+
+// @Test(expected = HDFException.class)
+// public void testVendIllegalId() throws Throwable {
+// HDFLibrary.Vend(-1);
+// }
+
+// @Test(expected = HDFException.class)
+// public void testVgetidIllegalId() throws Throwable {
+// HDFLibrary.Vgetid(-1, 0);
+// }
+
+ @Test(expected = NullPointerException.class)
+ public void testVgetclassNullClassName() throws Throwable {
+ HDFLibrary.Vgetclass(0, null);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testVgetclassIllegalArgument() throws Throwable {
+ HDFLibrary.Vgetclass(0, new String[0]);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVgetclassIllegalId() throws Throwable {
+ HDFLibrary.Vgetclass(-1, new String[1]);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testVgetnameNullName() throws Throwable {
+ HDFLibrary.Vgetname(0, null);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testVgetnameIllegalArgument() throws Throwable {
+ HDFLibrary.Vgetname(0, new String[0]);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVgetnameIllegalId() throws Throwable {
+ HDFLibrary.Vgetname(-1, new String[1]);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVisvgIllegalId() throws Throwable {
+ HDFLibrary.Visvg(-1, 0);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVisvsIllegalId() throws Throwable {
+ HDFLibrary.Visvs(-1, 0);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testVgettagrefsNullTags() throws Throwable {
+ int arraySize = 10;
+ HDFLibrary.Vgettagrefs(0, null, new int[arraySize], arraySize);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testVgettagrefsNullRefs() throws Throwable {
+ int arraySize = 10;
+ HDFLibrary.Vgettagrefs(0, new int[arraySize], null, arraySize);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testVgettagrefsIllegalTagsArgument() throws Throwable {
+ int arraySize = 10;
+ HDFLibrary.Vgettagrefs(0, new int[1], new int[arraySize], arraySize);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testVgettagrefsIllegalRefsArgument() throws Throwable {
+ int arraySize = 10;
+ HDFLibrary.Vgettagrefs(0, new int[arraySize], new int[1], arraySize);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVgettagrefsIllegalId() throws Throwable {
+ int arraySize = 10;
+ HDFLibrary.Vgettagrefs(-1, new int[arraySize], new int[arraySize], arraySize);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testVgettagrefNullTagRef() throws Throwable {
+ HDFLibrary.Vgettagref(0, 0, null);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testVgettagrefIllegalTagRefArgument() throws Throwable {
+ HDFLibrary.Vgettagref(0, 0, new int[1]);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVgettagrefIllegalId() throws Throwable {
+ HDFLibrary.Vgettagref(-1, 0, new int[2]);
+ }
+
+// @Test(expected = HDFException.class)
+// public void testVntagrefsIllegalId() throws Throwable {
+// HDFLibrary.Vntagrefs(-1);
+// }
+
+// @Test(expected = HDFException.class)
+// public void testVinqtagrefIllegalId() throws Throwable {
+// HDFLibrary.Vinqtagref(-1, 0, 0);
+// }
+
+ @Test(expected = NullPointerException.class)
+ public void testVloneNullRefArray() throws Throwable {
+ HDFLibrary.Vlone(0, null, 0);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testVloneIllegalSizeRefArray() throws Throwable {
+ int arraySize = 10;
+ HDFLibrary.Vlone(0, new int[arraySize - 1], arraySize);
+ }
+
+// @Test(expected = HDFException.class)
+// public void testVloneIllegalId() throws Throwable {
+// HDFLibrary.Vlone(-1, new int[10], 10);
+// }
+
+// @Test(expected = HDFException.class)
+// public void testVaddtagrefIllegalId() throws Throwable {
+// HDFLibrary.Vaddtagref(-1, 0, 0);
+// }
+
+// @Test(expected = HDFException.class)
+// public void testVdeletetagrefIllegalId() throws Throwable {
+// HDFLibrary.Vdeletetagref(-1, 0, 0);
+// }
+
+ @Test(expected = NullPointerException.class)
+ public void testVfindNullVGroupName() throws Throwable {
+ HDFLibrary.Vfind(0, null);
+ }
+
+// @Test(expected = HDFException.class)
+// public void testVfindIllegalId() throws Throwable {
+// String str = "";
+// HDFLibrary.Vfind(-1, str);
+// }
+
+ @Test(expected = NullPointerException.class)
+ public void testVfindclassNullClassName() throws Throwable {
+ HDFLibrary.Vfindclass(0, null);
+ }
+
+// @Test(expected = HDFException.class)
+// public void testVfindclassIllegalId() throws Throwable {
+// String str = "";
+// HDFLibrary.Vfindclass(-1, str);
+// }
+
+ @Test(expected = NullPointerException.class)
+ public void testVflocateNullClassName() throws Throwable {
+ HDFLibrary.Vflocate(0, null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVflocateIllegalKey() throws Throwable {
+ String str = "";
+ HDFLibrary.Vflocate(-1, str);
+ }
+
+// @Test(expected = HDFException.class)
+// public void testVgetnextIllegalKey() throws Throwable {
+// HDFLibrary.Vgetnext(-1, 0);
+// }
+
+ @Test(expected = NullPointerException.class)
+ public void testVinquireNullNEntries() throws Throwable {
+ HDFLibrary.Vinquire(0, null, new String[1]);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testVinquireNullVGroupName() throws Throwable {
+ HDFLibrary.Vinquire(0, new int[1], null);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testVinquireIllegalNEntriesArgument() throws Throwable {
+ HDFLibrary.Vinquire(0, new int[0], new String[1]);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testVinquireIllegalVGroupNameArgument() throws Throwable {
+ HDFLibrary.Vinquire(0, new int[1], new String[0]);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVinquireIllegalId() throws Throwable {
+ HDFLibrary.Vinquire(-1, new int[1], new String[1]);
+ }
+
+// @Test(expected = HDFException.class)
+// public void testVinsertIllegalVGroupId() throws Throwable {
+// HDFLibrary.Vinsert(-1, 0);
+// }
+
+// @Test(expected = HDFException.class)
+// public void testVinsertIllegalVId() throws Throwable {
+// HDFLibrary.Vinsert(0, -1);
+// }
+
+// @Test(expected = HDFException.class)
+// public void testVnrefsIllegalKey() throws Throwable {
+// HDFLibrary.Vnrefs(-1, 0);
+// }
+
+ @Test(expected = NullPointerException.class)
+ public void testVsetclassNullClassName() throws Throwable {
+ HDFLibrary.Vsetclass(0, null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVsetclassIllegalId() throws Throwable {
+ String str = "";
+ HDFLibrary.Vsetclass(-1, str);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testVsetnameNullName() throws Throwable {
+ HDFLibrary.Vsetname(0, null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVsetnameIllegalId() throws Throwable {
+ String str = "";
+ HDFLibrary.Vsetname(-1, str);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testVattrinfoNullName() throws Throwable {
+ HDFLibrary.Vattrinfo(0, 0, null, new int[5]);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testVattrinfoNullArgv() throws Throwable {
+ HDFLibrary.Vattrinfo(0, 0, new String[1], null);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testVattrinfoIllegalNameArgument() throws Throwable {
+ HDFLibrary.Vattrinfo(0, 0, new String[0], new int[5]);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testVattrinfoIllegalArgvArgument() throws Throwable {
+ HDFLibrary.Vattrinfo(0, 0, new String[1], new int[1]);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVattrinfoIllegalId() throws Throwable {
+ HDFLibrary.Vattrinfo(-1, 0, new String[1], new int[5]);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testVfindattrNullName() throws Throwable {
+ HDFLibrary.Vfindattr(0, null);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVfindattrIllegalId() throws Throwable {
+ String str = "";
+ HDFLibrary.Vfindattr(-1, str);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testVgetattrNullData() throws Throwable {
+ byte[] byteArray = null;
+ HDFLibrary.Vgetattr(0, 0, byteArray);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVgetattrIllegalId() throws Throwable {
+ byte[] byteArray = new byte[10];
+ HDFLibrary.Vgetattr(-1, 0, byteArray);
+ }
+
+// @Test(expected = HDFException.class)
+// public void testVgetversionIllegalId() throws Throwable {
+// HDFLibrary.Vgetversion(-1);
+// }
+
+// @Test(expected = HDFException.class)
+// public void testVnattrsIllegalId() throws Throwable {
+// HDFLibrary.Vnattrs(-1);
+// }
+
+ @Test(expected = NullPointerException.class)
+ public void testVsetattrNullName() throws Throwable {
+ String values = "";
+ HDFLibrary.Vsetattr(0, null, 0, 0, values);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testVsetattrNullName2() throws Throwable {
+ HDFLibrary.Vsetattr(0, null, 0, 0, new byte[10]);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testVsetattrNullValuesString() throws Throwable {
+ String str = "";
+ String values = null;
+ HDFLibrary.Vsetattr(0, str, 0, 0, values);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testVsetattrNullDataByteArray() throws Throwable {
+ String str = "";
+ byte[] data = null;
+ HDFLibrary.Vsetattr(0, str, 0, 0, data);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVsetattrIllegalId() throws Throwable {
+ String str = "";
+ String values = "";
+ HDFLibrary.Vsetattr(-1, str, 0, 0, values);
+ }
+
+ @Test(expected = HDFException.class)
+ public void testVsetattrIllegalId2() throws Throwable {
+ String str = "";
+ HDFLibrary.Vsetattr(-1, str, 0, 0, new byte[10]);
+ }
+}
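
For completeness, the happy-path counterpart of the V (vgroup) wrappers tested above is a traversal like the hedged sketch below. Only the Hopen/Hclose file plumbing is assumed; the other calls use the signatures visible in the tests (Vlone wants a refs array at least as large as the requested count, Vgettagref wants a two-element tag/ref array).

    import hdf.hdflib.HDFConstants;
    import hdf.hdflib.HDFException;
    import hdf.hdflib.HDFLibrary;

    public class VgroupWalkSketch {
        public static void main(String[] args) throws HDFException {
            int fid = HDFLibrary.Hopen("groups.hdf", HDFConstants.DFACC_READ);
            HDFLibrary.Vstart(fid);

            int[] refs = new int[64];
            int nlone = HDFLibrary.Vlone(fid, refs, refs.length);
            for (int i = 0; i < nlone; i++) {
                int vgid = HDFLibrary.Vattach(fid, refs[i], "r");
                String[] name = new String[1];
                HDFLibrary.Vgetname(vgid, name);

                int npairs = HDFLibrary.Vntagrefs(vgid);
                int[] tagref = new int[2];                     // tagref[0] = tag, tagref[1] = ref
                for (int j = 0; j < npairs; j++)
                    HDFLibrary.Vgettagref(vgid, j, tagref);

                HDFLibrary.Vdetach(vgid);
            }

            HDFLibrary.Vend(fid);
            HDFLibrary.Hclose(fid);
        }
    }
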
diff --git a/java/test/junit.sh.in b/java/test/junit.sh.in
new file mode 100644
index 0000000..cea2113
--- /dev/null
+++ b/java/test/junit.sh.in
@@ -0,0 +1,260 @@
+#! /bin/sh
+#
+# Copyright by The HDF Group.
+# Copyright by the Board of Trustees of the University of Illinois.
+# All rights reserved.
+#
+# This file is part of HDF Java Products. The full HDF Java copyright
+# notice, including terms governing use, modification, and redistribution,
+# is contained in the file, COPYING. COPYING can be found at the root of
+# the source code distribution tree. You can also access it online at
+# http://www.hdfgroup.org/products/licenses.html. If you do not have
+# access to the file, you may request a copy from help at hdfgroup.org.
+#
+
+top_builddir=@top_builddir@
+top_srcdir=@top_srcdir@
+srcdir=@srcdir@
+
+TESTNAME=JUnitInterface
+EXIT_SUCCESS=0
+EXIT_FAILURE=1
+
+# Set up default variable values if not supplied by the user.
+RM='rm -rf'
+CMP='cmp'
+DIFF='diff -c'
+CP='cp'
+DIRNAME='dirname'
+LS='ls'
+AWK='awk'
+
+nerrors=0
+verbose=yes
+
+# setup my machine information.
+myos=`uname -s`
+
+# where the libs exist
+HDFLIB_HOME="$top_srcdir/java/lib"
+BLDLIBDIR="$top_builddir/java/lib"
+BLDDIR="."
+HDFTEST_HOME="$top_srcdir/java/test"
+JARFILE=jar@PACKAGE_TARNAME@-@PACKAGE_VERSION@.jar
+TESTJARFILE=jar@PACKAGE_TARNAME@test.jar
+test -d $BLDLIBDIR || mkdir -p $BLDLIBDIR
+
+######################################################################
+# library files
+# --------------------------------------------------------------------
+# All the library files are copied from the source directory to the test directory.
+# NOTE: Keep this framework when adding or removing test files.
+# This list is also used for existence checks.
+# Entries can be commented out with a leading '#'.
+# --------------------------------------------------------------------
+LIST_LIBRARY_FILES="
+$HDFLIB_HOME/hamcrest-core.jar
+$HDFLIB_HOME/junit.jar
+$HDFLIB_HOME/slf4j-api-1.7.5.jar
+$HDFLIB_HOME/ext/slf4j-simple-1.7.5.jar
+$top_builddir/hdf/src/.libs/libdf.*
+$top_builddir/mfhdf/libsrc/.libs/libmfhdf.*
+$top_builddir/java/src/jni/.libs/libhdf_java.*
+$top_builddir/java/src/$JARFILE
+"
+LIST_DATA_FILES="
+$HDFTEST_HOME/JUnit-interface.txt
+$HDFTEST_HOME/JUnit-interface.ert
+"
+
+expect="JUnit-interface.txt"
+actual="JUnit-interface.out"
+actual_err="JUnit-interface.err"
+actual_ext="JUnit-interface.ext"
+
+#
+# copy files from source dirs to test dir
+#
+COPY_LIBFILES="$LIST_LIBRARY_FILES"
+
+COPY_LIBFILES_TO_BLDLIBDIR()
+{
+ # copy test files; use -f to make sure we get a fresh copy
+ for tstfile in $COPY_LIBFILES
+ do
+ # ignore '#' comment
+ echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
+ RET=$?
+ if [ $RET -eq 1 ]; then
+ # skip cp if srcdir is the same as destdir;
+ # this occurs when the build/test is performed in the source dir
+ # and would make cp fail
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $CP -f $tstfile $BLDLIBDIR
+ if [ $? -ne 0 ]; then
+ echo "Error: FAILED to copy $tstfile ."
+
+ # Comment this out to CREATE the expected file
+ exit $EXIT_FAILURE
+ fi
+ fi
+ fi
+ done
+}
+
+CLEAN_LIBFILES_AND_BLDLIBDIR()
+{
+ # skip rm if srcdir is the same as destdir;
+ # this occurs when the build/test is performed in the source dir
+ # and rm would remove the source files
+ SDIR=`$DIRNAME $HDFLIB_HOME/junit.jar`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $RM $BLDLIBDIR
+ fi
+}
+
+COPY_DATAFILES="$LIST_DATA_FILES"
+
+COPY_DATAFILES_TO_BLDDIR()
+{
+ # copy test files; use -f to make sure we get a fresh copy
+ for tstfile in $COPY_DATAFILES
+ do
+ # ignore '#' comment
+ echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
+ RET=$?
+ if [ $RET -eq 1 ]; then
+ # skip cp if srcdir is the same as destdir;
+ # this occurs when the build/test is performed in the source dir
+ # and would make cp fail
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $CP -f $tstfile $BLDDIR
+ if [ $? -ne 0 ]; then
+ echo "Error: FAILED to copy $tstfile ."
+
+ # Comment this out to CREATE the expected file
+ exit $EXIT_FAILURE
+ fi
+ fi
+ fi
+ done
+}
+
+CLEAN_DATAFILES_AND_BLDDIR()
+{
+ $RM $BLDDIR/JUnit-interface.out
+ $RM $BLDDIR/JUnit-interface.err
+ $RM $BLDDIR/JUnit-interface.ext
+ $RM $BLDDIR/JUnit-interface.txt
+}
+
+# Print a one-line message left justified in a field of 70 characters
+# beginning with the word "Testing".
+#
+TESTING() {
+ SPACES=" "
+ echo "Testing $* $SPACES" | cut -c1-70 | tr -d '\012'
+}
+
+# where Java is installed (requires jdk1.7.x)
+JAVAEXE=@JAVA@
+JAVAEXEFLAGS=@H4_JAVAFLAGS@
+
+###############################################################################
+# DO NOT MODIFY BELOW THIS LINE
+###############################################################################
+
+# prepare for test
+COPY_LIBFILES_TO_BLDLIBDIR
+COPY_DATAFILES_TO_BLDDIR
+
+CPATH=".:"$BLDLIBDIR"/"$JARFILE":"$BLDLIBDIR"/junit.jar:"$BLDLIBDIR"/hamcrest-core.jar:"$BLDLIBDIR"/slf4j-api-1.7.5.jar:"$BLDLIBDIR"/slf4j-simple-1.7.5.jar:"$TESTJARFILE""
+
+TEST=/usr/bin/test
+if [ ! -x /usr/bin/test ]
+then
+TEST=`which test`
+fi
+
+if $TEST -z "$CLASSPATH"; then
+ CLASSPATH=""
+fi
+CLASSPATH=$CPATH":"$CLASSPATH
+export CLASSPATH
+
+if $TEST -n "$JAVAPATH" ; then
+ PATH=$JAVAPATH":"$PATH
+ export PATH
+fi
+
+if $TEST -e /bin/uname; then
+ os_name=`/bin/uname -s`
+elif $TEST -e /usr/bin/uname; then
+ os_name=`/usr/bin/uname -s`
+else
+ os_name=unknown
+fi
+
+if $TEST -z "$LD_LIBRARY_PATH" ; then
+ LD_LIBRARY_PATH=""
+fi
+
+case $os_name in
+ Darwin)
+ DYLD_LIBRARY_PATH=$BLDLIBDIR:$DYLD_LIBRARY_PATH
+ export DYLD_LIBRARY_PATH
+ LD_LIBRARY_PATH=$DYLD_LIBRARY_PATH
+ ;;
+ *)
+ LD_LIBRARY_PATH=$BLDLIBDIR:$LD_LIBRARY_PATH
+ ;;
+esac
+
+export LD_LIBRARY_PATH
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH -ea org.junit.runner.JUnitCore test.TestAll"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH -ea org.junit.runner.JUnitCore test.TestAll 1>$actual_ext 2>$actual_err)
+
+# Extract file name, line number, version and thread IDs because they may be different
+sed -e 's/thread [0-9]*/thread (IDs)/' -e 's/: .*\.c /: (file name) /' \
+ -e 's/line [0-9]*/line (number)/' \
+ -e 's/Time: [0-9]*\.[0-9]*/Time: XXXX/' \
+ -e 's/v[1-9]*\.[0-9]*\./version (number)\./' \
+ -e 's/[1-9]*\.[0-9]*\.[0-9]*[^)]*/version (number)/' \
+ $actual_ext > $actual
+
+# SunOS does not support this. Skip it.
+if [ $myos = SunOS ]; then
+ echo " SKIPPED"
+else
+ if $CMP $expect $actual; then
+ echo " PASSED"
+ else
+ echo "*FAILED*"
+ echo " Expected result differs from actual result"
+ nerrors="`expr $nerrors + 1`"
+ test yes = "$verbose" && $DIFF $expect $actual |sed 's/^/ /'
+ fi
+fi
+
+
+# Clean up temporary files/directories
+CLEAN_LIBFILES_AND_BLDLIBDIR
+CLEAN_DATAFILES_AND_BLDDIR
+
+# Report test results and exit
+if test $nerrors -eq 0 ; then
+ echo "All $TESTNAME tests passed."
+ exit $EXIT_SUCCESS
+else
+ echo "$TESTNAME tests failed with $nerrors errors."
+ exit $EXIT_FAILURE
+fi
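
The script above runs test.TestAll under org.junit.runner.JUnitCore, scrubs volatile details (times, versions, thread IDs, file names) with sed, and diffs the result against JUnit-interface.txt. A new test class therefore follows the same JUnit 4 conventions as the classes added earlier and gets registered in TestAll; the skeleton below is a hedged illustration only (TestAll's suite wiring is assumed and not shown in this hunk).

    package test;

    import org.junit.Rule;
    import org.junit.Test;
    import org.junit.rules.TestName;

    import hdf.hdflib.HDFConstants;
    import hdf.hdflib.HDFException;
    import hdf.hdflib.HDFLibrary;

    public class TestH4ExampleParams {
        @Rule public TestName testname = new TestName();

        // Expected to raise HDFException when the file cannot be opened.
        @Test(expected = HDFException.class)
        public void testHopenMissingFile() throws Throwable {
            HDFLibrary.Hopen("/nonexistent/none.hdf", HDFConstants.DFACC_READ);
        }
    }
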
diff --git a/libhdf4.settings.in b/libhdf4.settings.in
index 1d9bbb9..a932410 100644
--- a/libhdf4.settings.in
+++ b/libhdf4.settings.in
@@ -32,6 +32,9 @@ Languages:
@HDF_BUILD_FORTRAN_TRUE@ Fortran Compiler: @F77_VERSION@
@HDF_BUILD_FORTRAN_TRUE@ FFLAGS: @FFLAGS@
+ Java: @HDF_JAVA@
+@BUILD_JAVA_CONDITIONAL_TRUE@ Java Compiler: @JAVA_VERSION@
+
Features:
---------
SZIP compression: @SZIP_INFO@
diff --git a/m4/ax_check_class.m4 b/m4/ax_check_class.m4
new file mode 100644
index 0000000..098aa77
--- /dev/null
+++ b/m4/ax_check_class.m4
@@ -0,0 +1,144 @@
+# ===========================================================================
+# http://www.gnu.org/software/autoconf-archive/ax_check_class.html
+# ===========================================================================
+#
+# SYNOPSIS
+#
+# AX_CHECK_CLASS
+#
+# DESCRIPTION
+#
+# AX_CHECK_CLASS tests the existence of a given Java class, either in a
+# jar or in a '.class' file.
+#
+# *Warning*: its success or failure can depend on a proper setting of the
+# CLASSPATH env. variable.
+#
+# Note: This is part of the set of autoconf M4 macros for Java programs.
+# It is VERY IMPORTANT that you download the whole set; some macros depend
+# on others. Unfortunately, the autoconf archive does not support the
+# concept of a set of macros, so I had to break it up for submission. The
+# general documentation, as well as the sample configure.in, is included
+# in the AX_PROG_JAVA macro.
+#
+# LICENSE
+#
+# Copyright (c) 2008 Stephane Bortzmeyer <bortzmeyer at pasteur.fr>
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the
+# Free Software Foundation; either version 2 of the License, or (at your
+# option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
+# Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# As a special exception, the respective Autoconf Macro's copyright owner
+# gives unlimited permission to copy, distribute and modify the configure
+# scripts that are the output of Autoconf when processing the Macro. You
+# need not follow the terms of the GNU General Public License when using
+# or distributing such scripts, even though portions of the text of the
+# Macro appear in them. The GNU General Public License (GPL) does govern
+# all other use of the material that constitutes the Autoconf Macro.
+#
+# This special exception to the GPL applies to versions of the Autoconf
+# Macro released by the Autoconf Archive. When you make and distribute a
+# modified version of the Autoconf Macro, you may extend this special
+# exception to the GPL to apply to your modified version as well.
+
+#serial 7
+
+AU_ALIAS([AC_CHECK_CLASS], [AX_CHECK_CLASS])
+AC_DEFUN([AX_CHECK_CLASS],[
+AC_REQUIRE([AX_PROG_JAVA])
+ac_var_name=`echo $1 | sed 's/\./_/g'`
+dnl Normally I'd use an AC_CACHE_CHECK here but since the variable name is
+dnl dynamic I need an extra level of extraction
+AC_MSG_CHECKING([for $1 class])
+AC_CACHE_VAL(ax_cv_class_$ac_var_name, [
+if test x$ac_cv_prog_uudecode_base64 = xyes; then
+dnl /**
+dnl * Test.java: used to test dynamically if a class exists.
+dnl */
+dnl public class Test
+dnl {
+dnl
+dnl public static void
+dnl main( String[] argv )
+dnl {
+dnl Class lib;
+dnl if (argv.length < 1)
+dnl {
+dnl System.err.println ("Missing argument");
+dnl System.exit (77);
+dnl }
+dnl try
+dnl {
+dnl lib = Class.forName (argv[0]);
+dnl }
+dnl catch (ClassNotFoundException e)
+dnl {
+dnl System.exit (1);
+dnl }
+dnl lib = null;
+dnl System.exit (0);
+dnl }
+dnl
+dnl }
+cat << \EOF > Test.uue
+begin-base64 644 Test.class
+yv66vgADAC0AKQcAAgEABFRlc3QHAAQBABBqYXZhL2xhbmcvT2JqZWN0AQAE
+bWFpbgEAFihbTGphdmEvbGFuZy9TdHJpbmc7KVYBAARDb2RlAQAPTGluZU51
+bWJlclRhYmxlDAAKAAsBAANlcnIBABVMamF2YS9pby9QcmludFN0cmVhbTsJ
+AA0ACQcADgEAEGphdmEvbGFuZy9TeXN0ZW0IABABABBNaXNzaW5nIGFyZ3Vt
+ZW50DAASABMBAAdwcmludGxuAQAVKExqYXZhL2xhbmcvU3RyaW5nOylWCgAV
+ABEHABYBABNqYXZhL2lvL1ByaW50U3RyZWFtDAAYABkBAARleGl0AQAEKEkp
+VgoADQAXDAAcAB0BAAdmb3JOYW1lAQAlKExqYXZhL2xhbmcvU3RyaW5nOylM
+amF2YS9sYW5nL0NsYXNzOwoAHwAbBwAgAQAPamF2YS9sYW5nL0NsYXNzBwAi
+AQAgamF2YS9sYW5nL0NsYXNzTm90Rm91bmRFeGNlcHRpb24BAAY8aW5pdD4B
+AAMoKVYMACMAJAoAAwAlAQAKU291cmNlRmlsZQEACVRlc3QuamF2YQAhAAEA
+AwAAAAAAAgAJAAUABgABAAcAAABtAAMAAwAAACkqvgSiABCyAAwSD7YAFBBN
+uAAaKgMyuAAeTKcACE0EuAAaAUwDuAAasQABABMAGgAdACEAAQAIAAAAKgAK
+AAAACgAAAAsABgANAA4ADgATABAAEwASAB4AFgAiABgAJAAZACgAGgABACMA
+JAABAAcAAAAhAAEAAQAAAAUqtwAmsQAAAAEACAAAAAoAAgAAAAQABAAEAAEA
+JwAAAAIAKA==
+====
+EOF
+ if $UUDECODE Test.uue; then
+ :
+ else
+ echo "configure: __oline__: uudecode had trouble decoding base 64 file 'Test.uue'" >&AS_MESSAGE_LOG_FD
+ echo "configure: failed file was:" >&AS_MESSAGE_LOG_FD
+ cat Test.uue >&AS_MESSAGE_LOG_FD
+ ac_cv_prog_uudecode_base64=no
+ fi
+ rm -f Test.uue
+ if AC_TRY_COMMAND($JAVA $JAVAFLAGS Test $1) >/dev/null 2>&1; then
+ eval "ac_cv_class_$ac_var_name=yes"
+ else
+ eval "ac_cv_class_$ac_var_name=no"
+ fi
+ rm -f Test.class
+else
+ AX_TRY_COMPILE_JAVA([$1], , [eval "ac_cv_class_$ac_var_name=yes"],
+ [eval "ac_cv_class_$ac_var_name=no"])
+fi
+eval "ac_var_val=$`eval echo ac_cv_class_$ac_var_name`"
+eval "HAVE_$ac_var_name=$`echo ac_cv_class_$ac_var_val`"
+HAVE_LAST_CLASS=$ac_var_val
+if test x$ac_var_val = xyes; then
+ ifelse([$2], , :, [$2])
+else
+ ifelse([$3], , :, [$3])
+fi
+])
+dnl for some reason the above statement didn't fall through here?
+dnl do scripts have variable scoping?
+eval "ac_var_val=$`eval echo ac_cv_class_$ac_var_name`"
+AC_MSG_RESULT($ac_var_val)
+])
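
The base64 block embedded in AX_CHECK_CLASS is a precompiled Test.class; its source is given in the dnl comments inside the macro and, cleaned up, amounts to this small probe program (exit 0 if the named class loads, 1 if not, 77 if no argument was given):

    public class Test {
        public static void main(String[] argv) {
            if (argv.length < 1) {
                System.err.println("Missing argument");
                System.exit(77);
            }
            try {
                Class.forName(argv[0]);
            }
            catch (ClassNotFoundException e) {
                System.exit(1);
            }
            System.exit(0);
        }
    }
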
diff --git a/m4/ax_check_classpath.m4 b/m4/ax_check_classpath.m4
new file mode 100644
index 0000000..3c9081a
--- /dev/null
+++ b/m4/ax_check_classpath.m4
@@ -0,0 +1,60 @@
+# ===========================================================================
+# http://www.gnu.org/software/autoconf-archive/ax_check_classpath.html
+# ===========================================================================
+#
+# SYNOPSIS
+#
+# AX_CHECK_CLASSPATH
+#
+# DESCRIPTION
+#
+# AX_CHECK_CLASSPATH just displays the CLASSPATH, for the edification of
+# the user.
+#
+# Note: This is part of the set of autoconf M4 macros for Java programs.
+# It is VERY IMPORTANT that you download the whole set; some macros depend
+# on others. Unfortunately, the autoconf archive does not support the
+# concept of a set of macros, so I had to break it up for submission. The
+# general documentation, as well as the sample configure.in, is included
+# in the AX_PROG_JAVA macro.
+#
+# LICENSE
+#
+# Copyright (c) 2008 Stephane Bortzmeyer <bortzmeyer at pasteur.fr>
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the
+# Free Software Foundation; either version 2 of the License, or (at your
+# option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
+# Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# As a special exception, the respective Autoconf Macro's copyright owner
+# gives unlimited permission to copy, distribute and modify the configure
+# scripts that are the output of Autoconf when processing the Macro. You
+# need not follow the terms of the GNU General Public License when using
+# or distributing such scripts, even though portions of the text of the
+# Macro appear in them. The GNU General Public License (GPL) does govern
+# all other use of the material that constitutes the Autoconf Macro.
+#
+# This special exception to the GPL applies to versions of the Autoconf
+# Macro released by the Autoconf Archive. When you make and distribute a
+# modified version of the Autoconf Macro, you may extend this special
+# exception to the GPL to apply to your modified version as well.
+
+#serial 5
+
+AU_ALIAS([AC_CHECK_CLASSPATH], [AX_CHECK_CLASSPATH])
+AC_DEFUN([AX_CHECK_CLASSPATH],[
+if test "x$CLASSPATH" = x; then
+ echo "You have no CLASSPATH, I hope it is good"
+else
+ echo "You have CLASSPATH $CLASSPATH, hope it is correct"
+fi
+])
diff --git a/m4/ax_check_java_home.m4 b/m4/ax_check_java_home.m4
new file mode 100644
index 0000000..cfe8f58
--- /dev/null
+++ b/m4/ax_check_java_home.m4
@@ -0,0 +1,80 @@
+# ===========================================================================
+# http://www.gnu.org/software/autoconf-archive/ax_check_java_home.html
+# ===========================================================================
+#
+# SYNOPSIS
+#
+# AX_CHECK_JAVA_HOME
+#
+# DESCRIPTION
+#
+# Check for a Sun Java (JDK / JRE) installation, i.e. where the 'java' VM is.
+# If found, set the environment variable JAVA_HOME to the Java installation
+# home; otherwise leave JAVA_HOME untouched, which in most cases means
+# JAVA_HOME is empty.
+#
+# LICENSE
+#
+# Copyright (c) 2008 Gleen Salmon <gleensalmon at yahoo.com>
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the
+# Free Software Foundation; either version 2 of the License, or (at your
+# option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
+# Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# As a special exception, the respective Autoconf Macro's copyright owner
+# gives unlimited permission to copy, distribute and modify the configure
+# scripts that are the output of Autoconf when processing the Macro. You
+# need not follow the terms of the GNU General Public License when using
+# or distributing such scripts, even though portions of the text of the
+# Macro appear in them. The GNU General Public License (GPL) does govern
+# all other use of the material that constitutes the Autoconf Macro.
+#
+# This special exception to the GPL applies to versions of the Autoconf
+# Macro released by the Autoconf Archive. When you make and distribute a
+# modified version of the Autoconf Macro, you may extend this special
+# exception to the GPL to apply to your modified version as well.
+
+#serial 6
+
+AU_ALIAS([AC_CHECK_JAVA_HOME], [AX_CHECK_JAVA_HOME])
+
+AC_DEFUN([AX_CHECK_JAVA_HOME],
+[AC_MSG_CHECKING([for JAVA_HOME])
+# We used a fake loop so that we can use "break" to exit when the result
+# is found.
+while true
+do
+ # If the user defined JAVA_HOME, don't touch it.
+ test "${JAVA_HOME+set}" = set && break
+
+ # On Mac OS X 10.5 and following, run /usr/libexec/java_home to get
+ # the value of JAVA_HOME to use.
+ # (http://developer.apple.com/library/mac/#qa/qa2001/qa1170.html).
+ JAVA_HOME=`/usr/libexec/java_home 2>/dev/null`
+ test x"$JAVA_HOME" != x && break
+
+ # See if we can find the java executable, and compute from there.
+ TRY_JAVA_HOME=`ls -dr /usr/java/* 2> /dev/null | head -n 1`
+ if test x$TRY_JAVA_HOME != x; then
+ PATH=$PATH:$TRY_JAVA_HOME/bin
+ fi
+ AC_PATH_PROG([JAVA_PATH_NAME], [java])
+ if test "x$JAVA_PATH_NAME" != x; then
+ JAVA_HOME=`echo $JAVA_PATH_NAME | sed "s/\(.*\)[[/]]bin[[/]]java.*/\1/"`
+ break
+ fi
+
+ AC_MSG_NOTICE([Could not compute JAVA_HOME])
+ break
+done
+AC_MSG_RESULT([$JAVA_HOME])
+])
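
AX_CHECK_JAVA_HOME derives JAVA_HOME from outside the VM (java_home on Mac OS X, otherwise the location of the java executable). From inside a running VM the matching value is the java.home system property, so a quick cross-check (not part of this patch) is simply:

    public class ShowJavaHome {
        public static void main(String[] args) {
            // Normally equal to, or a jre/ subdirectory of, the detected JAVA_HOME.
            System.out.println(System.getProperty("java.home"));
        }
    }
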
diff --git a/m4/ax_check_junit.m4 b/m4/ax_check_junit.m4
new file mode 100644
index 0000000..39b52d1
--- /dev/null
+++ b/m4/ax_check_junit.m4
@@ -0,0 +1,70 @@
+# ===========================================================================
+# http://www.gnu.org/software/autoconf-archive/ax_check_junit.html
+# ===========================================================================
+#
+# SYNOPSIS
+#
+# AX_CHECK_JUNIT
+#
+# DESCRIPTION
+#
+# AX_CHECK_JUNIT tests the availability of the JUnit testing framework,
+# and sets some variables for conditional compilation of the test suite by
+# automake.
+#
+# If available, JUNIT is set to a command launching the text based user
+# interface of Junit, @JAVA_JUNIT@ is set to $JAVA_JUNIT and @TESTS_JUNIT@
+# is set to $TESTS_JUNIT, otherwise they are set to empty values.
+#
+# You can use these variables in your Makefile.am file like this :
+#
+# # Some of the following classes are built only if junit is available
+# JAVA_JUNIT = Class1Test.java Class2Test.java AllJunitTests.java
+#
+# noinst_JAVA = Example1.java Example2.java @JAVA_JUNIT@
+#
+# EXTRA_JAVA = $(JAVA_JUNIT)
+#
+# TESTS_JUNIT = AllJunitTests
+#
+# TESTS = StandaloneTest1 StandaloneTest2 @TESTS_JUNIT@
+#
+# EXTRA_TESTS = $(TESTS_JUNIT)
+#
+# AllJunitTests :
+# echo "#! /bin/sh" > $@
+# echo "exec @JUNIT@ my.package.name.AllJunitTests" >> $@
+# chmod +x $@
+#
+# LICENSE
+#
+# Copyright (c) 2008 Luc Maisonobe <luc at spaceroots.org>
+#
+# Copying and distribution of this file, with or without modification, are
+# permitted in any medium without royalty provided the copyright notice
+# and this notice are preserved. This file is offered as-is, without any
+# warranty.
+
+#serial 5
+
+AU_ALIAS([AC_CHECK_JUNIT], [AX_CHECK_JUNIT])
+AC_DEFUN([AX_CHECK_JUNIT],[
+AC_CACHE_VAL(ac_cv_prog_JUNIT,[
+AC_CHECK_CLASS(junit.textui.TestRunner)
+if test x"`eval 'echo $ac_cv_class_junit_textui_TestRunner'`" != xno ; then
+ ac_cv_prog_JUNIT='$(CLASSPATH_ENV) $(JAVA) $(JAVAFLAGS) junit.textui.TestRunner'
+fi])
+AC_MSG_CHECKING([for junit])
+if test x"`eval 'echo $ac_cv_prog_JUNIT'`" != x ; then
+ JUNIT="$ac_cv_prog_JUNIT"
+ JAVA_JUNIT='$(JAVA_JUNIT)'
+ TESTS_JUNIT='$(TESTS_JUNIT)'
+else
+ JUNIT=
+ JAVA_JUNIT=
+ TESTS_JUNIT=
+fi
+AC_MSG_RESULT($JAVA_JUNIT)
+AC_SUBST(JUNIT)
+AC_SUBST(JAVA_JUNIT)
+AC_SUBST(TESTS_JUNIT)])
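
The JUNIT command this macro exports launches the JUnit 3 text runner (junit.textui.TestRunner), so the AllJunitTests class used in the Makefile.am example must be runnable that way. A minimal, purely illustrative sketch:

    import junit.framework.Test;
    import junit.framework.TestCase;
    import junit.framework.TestSuite;

    public class AllJunitTests extends TestCase {
        public void testNothing() {
            assertTrue(true);
        }

        // junit.textui.TestRunner uses this factory method when given the class name.
        public static Test suite() {
            return new TestSuite(AllJunitTests.class);
        }
    }
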
diff --git a/m4/ax_check_rqrd_class.m4 b/m4/ax_check_rqrd_class.m4
new file mode 100644
index 0000000..8f14241
--- /dev/null
+++ b/m4/ax_check_rqrd_class.m4
@@ -0,0 +1,62 @@
+# ===========================================================================
+# http://www.gnu.org/software/autoconf-archive/ax_check_rqrd_class.html
+# ===========================================================================
+#
+# SYNOPSIS
+#
+# AX_CHECK_RQRD_CLASS
+#
+# DESCRIPTION
+#
+# AX_CHECK_RQRD_CLASS tests the existence of a given Java class, either in
+# a jar or in a '.class' file and fails if it doesn't exist. Its success
+# or failure can depend on a proper setting of the CLASSPATH env.
+# variable.
+#
+# Note: This is part of the set of autoconf M4 macros for Java programs.
+# It is VERY IMPORTANT that you download the whole set; some macros depend
+# on others. Unfortunately, the autoconf archive does not support the
+# concept of a set of macros, so I had to break it up for submission. The
+# general documentation, as well as the sample configure.in, is included
+# in the AX_PROG_JAVA macro.
+#
+# LICENSE
+#
+# Copyright (c) 2008 Stephane Bortzmeyer <bortzmeyer at pasteur.fr>
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the
+# Free Software Foundation; either version 2 of the License, or (at your
+# option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
+# Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# As a special exception, the respective Autoconf Macro's copyright owner
+# gives unlimited permission to copy, distribute and modify the configure
+# scripts that are the output of Autoconf when processing the Macro. You
+# need not follow the terms of the GNU General Public License when using
+# or distributing such scripts, even though portions of the text of the
+# Macro appear in them. The GNU General Public License (GPL) does govern
+# all other use of the material that constitutes the Autoconf Macro.
+#
+# This special exception to the GPL applies to versions of the Autoconf
+# Macro released by the Autoconf Archive. When you make and distribute a
+# modified version of the Autoconf Macro, you may extend this special
+# exception to the GPL to apply to your modified version as well.
+
+#serial 5
+
+AU_ALIAS([AC_CHECK_RQRD_CLASS], [AX_CHECK_RQRD_CLASS])
+AC_DEFUN([AX_CHECK_RQRD_CLASS],[
+CLASS=`echo $1|sed 's/\./_/g'`
+AC_CHECK_CLASS($1)
+if test "$HAVE_LAST_CLASS" = "no"; then
+ AC_MSG_ERROR([Required class $1 missing, exiting.])
+fi
+])
diff --git a/m4/ax_java_check_class.m4 b/m4/ax_java_check_class.m4
new file mode 100644
index 0000000..917638a
--- /dev/null
+++ b/m4/ax_java_check_class.m4
@@ -0,0 +1,85 @@
+# ===========================================================================
+# http://www.gnu.org/software/autoconf-archive/ax_java_check_class.html
+# ===========================================================================
+#
+# SYNOPSIS
+#
+# AX_JAVA_CHECK_CLASS(<class>,<action-if-found>,<action-if-not-found>)
+#
+# DESCRIPTION
+#
+# Test if a Java class is available. Based on AX_PROG_JAVAC_WORKS. This
+# version uses a cache variable which is compiler-, options- and
+# classpath-dependent (so if you switch from javac to gcj it correctly
+# notices and redoes the test).
+#
+# The macro tries to compile a minimal program importing <class>. Some
+# newer compilers moan about the import never being used but fail or produce a
+# class file anyway. All moaning is sunk to /dev/null since I only wanted
+# to know if the class could be imported. This is a recommended followup
+# to AX_CHECK_JAVA_PLUGIN with classpath appropriately adjusted.
+#
+# LICENSE
+#
+# Copyright (c) 2008 Duncan Simpson <dps at simpson.demon.co.uk>
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the
+# Free Software Foundation; either version 2 of the License, or (at your
+# option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
+# Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# As a special exception, the respective Autoconf Macro's copyright owner
+# gives unlimited permission to copy, distribute and modify the configure
+# scripts that are the output of Autoconf when processing the Macro. You
+# need not follow the terms of the GNU General Public License when using
+# or distributing such scripts, even though portions of the text of the
+# Macro appear in them. The GNU General Public License (GPL) does govern
+# all other use of the material that constitutes the Autoconf Macro.
+#
+# This special exception to the GPL applies to versions of the Autoconf
+# Macro released by the Autoconf Archive. When you make and distribute a
+# modified version of the Autoconf Macro, you may extend this special
+# exception to the GPL to apply to your modified version as well.
+
+#serial 9
+
+AU_ALIAS([DPS_JAVA_CHECK_CLASS], [AX_JAVA_CHECK_CLASS])
+AC_DEFUN([AX_JAVA_CHECK_CLASS],[
+m4_define([cache_val],[m4_translit(ax_cv_have_java_class_$1, " ." ,"__")])
+if test "x$CLASSPATH" != "x"; then
+xtra=" with classpath ${CLASSPATH}"
+xopts=`echo ${CLASSPATH} | ${SED} 's/^ *://'`
+xopts="-classpath $xopts"
+else xtra=""; xopts=""; fi
+cache_var="cache_val"AS_TR_SH([_Jc_${JAVAC}_Cp_${CLASSPATH}])
+AC_CACHE_CHECK([if the $1 class is available$xtra], [$cache_var], [
+JAVA_TEST=Test.java
+CLASS_TEST=Test.class
+cat << \EOF > $JAVA_TEST
+/* [#]xline __oline__ "configure" */
+import $1;
+public class Test {
+}
+EOF
+if AC_TRY_COMMAND($JAVAC $JAVACFLAGS $xopts $JAVA_TEST) >/dev/null 2>&1; then
+ eval "${cache_var}=yes"
+else
+ eval "${cache_var}=no"
+ echo "configure: failed program was:" >&AS_MESSAGE_LOG_FD
+ cat $JAVA_TEST >&AS_MESSAGE_LOG_FD
+fi
+rm -f $JAVA_TEST $CLASS_TEST
+])
+if eval 'test "x$'${cache_var}'" = "xyes"'; then
+$2
+true; else
+$3
+false; fi])
diff --git a/m4/ax_java_options.m4 b/m4/ax_java_options.m4
new file mode 100644
index 0000000..36c10d9
--- /dev/null
+++ b/m4/ax_java_options.m4
@@ -0,0 +1,48 @@
+# ===========================================================================
+# http://www.gnu.org/software/autoconf-archive/ax_java_options.html
+# ===========================================================================
+#
+# SYNOPSIS
+#
+# AX_JAVA_OPTIONS
+#
+# DESCRIPTION
+#
+# AX_JAVA_OPTIONS adds configure command line options used for Java m4
+# macros. This Macro is optional.
+#
+# Note: This is part of the set of autoconf M4 macros for Java programs.
+# It is VERY IMPORTANT that you download the whole set; some macros depend
+# on others. Unfortunately, the autoconf archive does not support the
+# concept of a set of macros, so I had to break it up for submission. The
+# general documentation, as well as the sample configure.in, is included
+# in the AX_PROG_JAVA macro.
+#
+# LICENSE
+#
+# Copyright (c) 2008 Devin Weaver <ktohg at tritarget.com>
+#
+# Copying and distribution of this file, with or without modification, are
+# permitted in any medium without royalty provided the copyright notice
+# and this notice are preserved. This file is offered as-is, without any
+# warranty.
+
+#serial 6
+
+AU_ALIAS([AC_JAVA_OPTIONS], [AX_JAVA_OPTIONS])
+AC_DEFUN([AX_JAVA_OPTIONS],[
+AC_ARG_WITH(java-prefix,
+ [ --with-java-prefix=PFX prefix where Java runtime is installed (optional)])
+AC_ARG_WITH(javac-flags,
+ [ --with-javac-flags=FLAGS flags to pass to the Java compiler (optional)])
+AC_ARG_WITH(java-flags,
+ [ --with-java-flags=FLAGS flags to pass to the Java VM (optional)])
+JAVAPREFIX=$with_java_prefix
+JAVACFLAGS=$with_javac_flags
+JAVAFLAGS=$with_java_flags
+AC_SUBST(JAVAPREFIX)dnl
+AC_SUBST(JAVACFLAGS)dnl
+AC_SUBST(JAVAFLAGS)dnl
+AC_SUBST(JAVA)dnl
+AC_SUBST(JAVAC)dnl
+])
diff --git a/m4/ax_jni_include_dir.m4 b/m4/ax_jni_include_dir.m4
new file mode 100644
index 0000000..becb33a
--- /dev/null
+++ b/m4/ax_jni_include_dir.m4
@@ -0,0 +1,132 @@
+# ===========================================================================
+# http://www.gnu.org/software/autoconf-archive/ax_jni_include_dir.html
+# ===========================================================================
+#
+# SYNOPSIS
+#
+# AX_JNI_INCLUDE_DIR
+#
+# DESCRIPTION
+#
+# AX_JNI_INCLUDE_DIR finds include directories needed for compiling
+# programs using the JNI interface.
+#
+# JNI include directories are usually in the Java distribution. This is
+# deduced from the value of $JAVA_HOME, $JAVAC, or the path to "javac", in
+# that order. When this macro completes, a list of directories is left in
+# the variable JNI_INCLUDE_DIRS.
+#
+# Example usage follows:
+#
+# AX_JNI_INCLUDE_DIR
+#
+# for JNI_INCLUDE_DIR in $JNI_INCLUDE_DIRS
+# do
+# CPPFLAGS="$CPPFLAGS -I$JNI_INCLUDE_DIR"
+# done
+#
+# If you want to force a specific compiler:
+#
+# - at the configure.in level, set JAVAC=yourcompiler before calling
+# AX_JNI_INCLUDE_DIR
+#
+# - at the configure level, setenv JAVAC
+#
+# Note: This macro can work with the autoconf M4 macros for Java programs.
+# This particular macro is not part of the original set of macros.
+#
+# LICENSE
+#
+# Copyright (c) 2008 Don Anderson <dda at sleepycat.com>
+#
+# Copying and distribution of this file, with or without modification, are
+# permitted in any medium without royalty provided the copyright notice
+# and this notice are preserved. This file is offered as-is, without any
+# warranty.
+
+#serial 11
+
+AU_ALIAS([AC_JNI_INCLUDE_DIR], [AX_JNI_INCLUDE_DIR])
+AC_DEFUN([AX_JNI_INCLUDE_DIR],[
+
+JNI_INCLUDE_DIRS=""
+
+if test "x$JAVA_HOME" != x; then
+ _JTOPDIR="$JAVA_HOME"
+else
+ if test "x$JAVAC" = x; then
+ JAVAC=javac
+ fi
+ AC_PATH_PROG([_ACJNI_JAVAC], [$JAVAC], [no])
+ if test "x$_ACJNI_JAVAC" = xno; then
+ AC_MSG_ERROR([cannot find JDK; try setting \$JAVAC or \$JAVA_HOME])
+ fi
+ _ACJNI_FOLLOW_SYMLINKS("$_ACJNI_JAVAC")
+ _JTOPDIR=`echo "$_ACJNI_FOLLOWED" | sed -e 's://*:/:g' -e 's:/[[^/]]*$::'`
+fi
+
+case "$host_os" in
+ darwin*) # Apple JDK is at /System location and has headers symlinked elsewhere
+ case "$_JTOPDIR" in
+ /System/Library/Frameworks/JavaVM.framework/*)
+ _JTOPDIR=`echo "$_JTOPDIR" | sed -e 's:/[[^/]]*$::'`
+ _JINC="$_JTOPDIR/Headers";;
+ *) _JINC="$_JTOPDIR/include";;
+ esac;;
+ *) _JINC="$_JTOPDIR/include";;
+esac
+_AS_ECHO_LOG([_JTOPDIR=$_JTOPDIR])
+_AS_ECHO_LOG([_JINC=$_JINC])
+
+# On Mac OS X 10.6.4, jni.h is a symlink:
+# /System/Library/Frameworks/JavaVM.framework/Versions/Current/Headers/jni.h
+# -> ../../CurrentJDK/Headers/jni.h.
+AC_CHECK_FILE([$_JINC/jni.h],
+ [JNI_INCLUDE_DIRS="$JNI_INCLUDE_DIRS $_JINC"],
+ [_JTOPDIR=`echo "$_JTOPDIR" | sed -e 's:/[[^/]]*$::'`
+ AC_CHECK_FILE([$_JTOPDIR/include/jni.h],
+ [JNI_INCLUDE_DIRS="$JNI_INCLUDE_DIRS $_JTOPDIR/include"],
+ AC_MSG_ERROR([cannot find JDK header files]))
+ ])
+
+# get the likely subdirectories for system specific java includes
+case "$host_os" in
+bsdi*) _JNI_INC_SUBDIRS="bsdos";;
+freebsd*) _JNI_INC_SUBDIRS="freebsd";;
+darwin*) _JNI_INC_SUBDIRS="darwin";;
+linux*) _JNI_INC_SUBDIRS="linux genunix";;
+osf*) _JNI_INC_SUBDIRS="alpha";;
+solaris*) _JNI_INC_SUBDIRS="solaris";;
+mingw*) _JNI_INC_SUBDIRS="win32";;
+cygwin*) _JNI_INC_SUBDIRS="win32";;
+*) _JNI_INC_SUBDIRS="genunix";;
+esac
+
+# add any subdirectories that are present
+for JINCSUBDIR in $_JNI_INC_SUBDIRS
+do
+ if test -d "$_JTOPDIR/include/$JINCSUBDIR"; then
+ JNI_INCLUDE_DIRS="$JNI_INCLUDE_DIRS $_JTOPDIR/include/$JINCSUBDIR"
+ fi
+done
+])
+
+# _ACJNI_FOLLOW_SYMLINKS <path>
+# Follows symbolic links on <path>,
+# finally setting variable _ACJNI_FOLLOWED
+# ----------------------------------------
+AC_DEFUN([_ACJNI_FOLLOW_SYMLINKS],[
+# find the include directory relative to the javac executable
+_cur="$1"
+while ls -ld "$_cur" 2>/dev/null | grep " -> " >/dev/null; do
+ AC_MSG_CHECKING([symlink for $_cur])
+ _slink=`ls -ld "$_cur" | sed 's/.* -> //'`
+ case "$_slink" in
+ /*) _cur="$_slink";;
+ # 'X' avoids triggering unwanted echo options.
+ *) _cur=`echo "X$_cur" | sed -e 's/^X//' -e 's:[[^/]]*$::'`"$_slink";;
+ esac
+ AC_MSG_RESULT([$_cur])
+done
+_ACJNI_FOLLOWED="$_cur"
+])# _ACJNI
diff --git a/m4/ax_prog_jar.m4 b/m4/ax_prog_jar.m4
new file mode 100644
index 0000000..3c60fca
--- /dev/null
+++ b/m4/ax_prog_jar.m4
@@ -0,0 +1,49 @@
+# ===========================================================================
+# http://www.gnu.org/software/autoconf-archive/ax_prog_jar.html
+# ===========================================================================
+#
+# SYNOPSIS
+#
+# AX_PROG_JAR
+#
+# DESCRIPTION
+#
+# AX_PROG_JAR tests for an existing jar program. It uses the environment
+# variable JAR then tests in sequence various common jar programs.
+#
+# If you want to force a specific compiler:
+#
+# - at the configure.in level, set JAR=yourcompiler before calling
+# AX_PROG_JAR
+#
+# - at the configure level, setenv JAR
+#
+# You can use the JAR variable in your Makefile.in, with @JAR@.
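+#
+# A minimal sketch (the jar name and prerequisite list are illustrative):
+#
+#   configure.ac:   AX_PROG_JAR
+#
+#   Makefile.in:    myapp.jar: $(CLASSES)
+#                           @JAR@ cf $@ $(CLASSES)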
+#
+# Note: This macro depends on the autoconf M4 macros for Java programs. It
+# is VERY IMPORTANT that you download that whole set; some macros depend
+# on others. Unfortunately, the autoconf archive does not support the
+# concept of a set of macros, so I had to break it up for submission.
+#
+# The general documentation of those macros, as well as the sample
+# configure.in, is included in the AX_PROG_JAVA macro.
+#
+# LICENSE
+#
+# Copyright (c) 2008 Egon Willighagen <e.willighagen at science.ru.nl>
+#
+# Copying and distribution of this file, with or without modification, are
+# permitted in any medium without royalty provided the copyright notice
+# and this notice are preserved. This file is offered as-is, without any
+# warranty.
+
+#serial 7
+
+AU_ALIAS([AC_PROG_JAR], [AX_PROG_JAR])
+AC_DEFUN([AX_PROG_JAR],[
+AS_IF([test "x$JAVAPREFIX" = x],
+ [test "x$JAR" = x && AC_CHECK_PROGS([JAR], [jar])],
+ [test "x$JAR" = x && AC_CHECK_PROGS([JAR], [jar], [], [$JAVAPREFIX/bin])])
+test "x$JAR" = x && AC_MSG_ERROR([no acceptable jar program found in \$PATH])
+AC_PROVIDE([$0])dnl
+])
diff --git a/m4/ax_prog_java.m4 b/m4/ax_prog_java.m4
new file mode 100644
index 0000000..03961db
--- /dev/null
+++ b/m4/ax_prog_java.m4
@@ -0,0 +1,115 @@
+# ===========================================================================
+# http://www.gnu.org/software/autoconf-archive/ax_prog_java.html
+# ===========================================================================
+#
+# SYNOPSIS
+#
+# AX_PROG_JAVA
+#
+# DESCRIPTION
+#
+# Here is a summary of the main macros:
+#
+# AX_PROG_JAVAC: finds a Java compiler.
+#
+# AX_PROG_JAVA: finds a Java virtual machine.
+#
+# AX_CHECK_CLASS: finds if we have the given class (beware of CLASSPATH!).
+#
+# AX_CHECK_RQRD_CLASS: finds if we have the given class and stops
+# otherwise.
+#
+# AX_TRY_COMPILE_JAVA: attempts to compile user-given source.
+#
+# AX_TRY_RUN_JAVA: attempts to compile and run user-given source.
+#
+# AX_JAVA_OPTIONS: adds Java configure options.
+#
+# AX_PROG_JAVA tests an existing Java virtual machine. It uses the
+# environment variable JAVA then tests in sequence various common Java
+# virtual machines. For political reasons, it starts with the free ones.
+# You *must* call [AX_PROG_JAVAC] before.
+#
+# If you want to force a specific VM:
+#
+# - at the configure.in level, set JAVA=yourvm before calling AX_PROG_JAVA
+#
+# (but after AC_INIT)
+#
+# - at the configure level, setenv JAVA
+#
+# You can use the JAVA variable in your Makefile.in, with @JAVA@.
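+#
+# For example, in Makefile.in (the class name is illustrative):
+#
+#   run: Main.class
+#           @JAVA@ -classpath . Main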
+#
+# *Warning*: its success or failure can depend on a proper setting of the
+# CLASSPATH env. variable.
+#
+# TODO: allow excluding virtual machines (rationale: most Java programs
+# cannot run with some VM like kaffe).
+#
+# Note: This is part of the set of autoconf M4 macros for Java programs.
+# It is VERY IMPORTANT that you download the whole set; some macros depend
+# on others. Unfortunately, the autoconf archive does not support the
+# concept of a set of macros, so I had to break it up for submission.
+#
+# A Web page, with a link to the latest CVS snapshot is at
+# <http://www.internatif.org/bortzmeyer/autoconf-Java/>.
+#
+# This is a sample configure.in. Process this file with autoconf to produce
+# a configure script.
+#
+# AC_INIT(UnTag.java)
+#
+# dnl Checks for programs.
+# AC_CHECK_CLASSPATH
+# AX_PROG_JAVAC
+# AX_PROG_JAVA
+#
+# dnl Checks for classes
+# AX_CHECK_RQRD_CLASS(org.xml.sax.Parser)
+# AX_CHECK_RQRD_CLASS(com.jclark.xml.sax.Driver)
+#
+# AC_OUTPUT(Makefile)
+#
+# LICENSE
+#
+# Copyright (c) 2008 Stephane Bortzmeyer <bortzmeyer at pasteur.fr>
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the
+# Free Software Foundation; either version 2 of the License, or (at your
+# option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
+# Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# As a special exception, the respective Autoconf Macro's copyright owner
+# gives unlimited permission to copy, distribute and modify the configure
+# scripts that are the output of Autoconf when processing the Macro. You
+# need not follow the terms of the GNU General Public License when using
+# or distributing such scripts, even though portions of the text of the
+# Macro appear in them. The GNU General Public License (GPL) does govern
+# all other use of the material that constitutes the Autoconf Macro.
+#
+# This special exception to the GPL applies to versions of the Autoconf
+# Macro released by the Autoconf Archive. When you make and distribute a
+# modified version of the Autoconf Macro, you may extend this special
+# exception to the GPL to apply to your modified version as well.
+
+#serial 9
+
+AU_ALIAS([AC_PROG_JAVA], [AX_PROG_JAVA])
+AC_DEFUN([AX_PROG_JAVA],[
+m4_define([m4_ax_prog_java_list], [kaffe java])dnl
+AS_IF([test "x$JAVAPREFIX" = x],
+ [test x$JAVA = x && AC_CHECK_PROGS([JAVA], [m4_ax_prog_java_list])],
+ [test x$JAVA = x && AC_CHECK_PROGS([JAVA], [m4_ax_prog_java_list], [], [$JAVAPREFIX/bin])])
+test x$JAVA = x && AC_MSG_ERROR([no acceptable Java virtual machine found in \$PATH])
+m4_undefine([m4_ax_prog_java_list])dnl
+AX_PROG_JAVA_WORKS
+AC_PROVIDE([$0])dnl
+])
diff --git a/m4/ax_prog_java_cc.m4 b/m4/ax_prog_java_cc.m4
new file mode 100644
index 0000000..3df064f
--- /dev/null
+++ b/m4/ax_prog_java_cc.m4
@@ -0,0 +1,104 @@
+# ===========================================================================
+# http://www.gnu.org/software/autoconf-archive/ax_prog_java_cc.html
+# ===========================================================================
+#
+# SYNOPSIS
+#
+# AX_PROG_JAVA_CC
+#
+# DESCRIPTION
+#
+# Finds the appropriate java compiler on your path. By preference the java
+# compiler is gcj, then jikes, then javac.
+#
+# The macro can take one argument specifying a space separated list of
+# java compiler names.
+#
+# For example:
+#
+# AX_PROG_JAVA_CC(javac, gcj)
+#
+# The macro also sets the compiler options variable: JAVA_CC_OPTS to
+# something sensible:
+#
+# - for GCJ it sets it to: @GCJ_OPTS@
+# (if GCJ_OPTS is not yet defined then it is set to "-C")
+#
+# - no other compiler has applicable options yet
+#
+# Here's an example configure.in:
+#
+# AC_INIT(Makefile.in)
+# AX_PROG_JAVA_CC()
+# AC_OUTPUT(Makefile)
+# dnl End.
+#
+# And here's the start of the Makefile.in:
+#
+# PROJECT_ROOT := @srcdir@
+# # Tool definitions.
+# JAVAC := @JAVA_CC@
+# JAVAC_OPTS := @JAVA_CC_OPTS@
+# JAR_TOOL := @jar_tool@
+#
+# LICENSE
+#
+# Copyright (c) 2008 Nic Ferrier <nferrier at tapsellferrier.co.uk>
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the
+# Free Software Foundation; either version 2 of the License, or (at your
+# option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
+# Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# As a special exception, the respective Autoconf Macro's copyright owner
+# gives unlimited permission to copy, distribute and modify the configure
+# scripts that are the output of Autoconf when processing the Macro. You
+# need not follow the terms of the GNU General Public License when using
+# or distributing such scripts, even though portions of the text of the
+# Macro appear in them. The GNU General Public License (GPL) does govern
+# all other use of the material that constitutes the Autoconf Macro.
+#
+# This special exception to the GPL applies to versions of the Autoconf
+# Macro released by the Autoconf Archive. When you make and distribute a
+# modified version of the Autoconf Macro, you may extend this special
+# exception to the GPL to apply to your modified version as well.
+
+#serial 4
+
+# AX_PROG_JAVA_CC([COMPILER ...])
+# --------------------------
+# COMPILER ... is a space separated list of java compilers to search for.
+# This just gives the user an opportunity to specify an alternative
+# search list for the java compiler.
+AU_ALIAS([AC_PROG_JAVA_CC], [AX_PROG_JAVA_CC])
+AC_DEFUN([AX_PROG_JAVA_CC],
+[AC_ARG_VAR([JAVA_CC], [java compiler command])dnl
+AC_ARG_VAR([JAVA_CC_FLAGS], [java compiler flags])dnl
+m4_ifval([$1],
+ [AC_CHECK_TOOLS(JAVA_CC, [$1])],
+[AC_CHECK_TOOL(JAVA_CC, gcj)
+if test -z "$JAVA_CC"; then
+ AC_CHECK_TOOL(JAVA_CC, javac)
+fi
+if test -z "$JAVA_CC"; then
+ AC_CHECK_TOOL(JAVA_CC, jikes)
+fi
+])
+
+if test "$JAVA_CC" = "gcj"; then
+ if test "$GCJ_OPTS" = ""; then
+ AC_SUBST(GCJ_OPTS,-C)
+ fi
+ AC_SUBST(JAVA_CC_OPTS, @GCJ_OPTS@,
+ [Define the compilation options for GCJ])
+fi
+test -z "$JAVA_CC" && AC_MSG_ERROR([no acceptable java compiler found in \$PATH])
+])# AX_PROG_JAVA_CC
diff --git a/m4/ax_prog_java_works.m4 b/m4/ax_prog_java_works.m4
new file mode 100644
index 0000000..54e132a
--- /dev/null
+++ b/m4/ax_prog_java_works.m4
@@ -0,0 +1,134 @@
+# ===========================================================================
+# http://www.gnu.org/software/autoconf-archive/ax_prog_java_works.html
+# ===========================================================================
+#
+# SYNOPSIS
+#
+# AX_PROG_JAVA_WORKS
+#
+# DESCRIPTION
+#
+# Internal use ONLY.
+#
+# Note: This is part of the set of autoconf M4 macros for Java programs.
+# It is VERY IMPORTANT that you download the whole set; some macros depend
+# on others. Unfortunately, the autoconf archive does not support the
+# concept of a set of macros, so I had to break it up for submission. The
+# general documentation, as well as the sample configure.in, is included
+# in the AX_PROG_JAVA macro.
+#
+# LICENSE
+#
+# Copyright (c) 2008 Stephane Bortzmeyer <bortzmeyer at pasteur.fr>
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the
+# Free Software Foundation; either version 2 of the License, or (at your
+# option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
+# Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# As a special exception, the respective Autoconf Macro's copyright owner
+# gives unlimited permission to copy, distribute and modify the configure
+# scripts that are the output of Autoconf when processing the Macro. You
+# need not follow the terms of the GNU General Public License when using
+# or distributing such scripts, even though portions of the text of the
+# Macro appear in them. The GNU General Public License (GPL) does govern
+# all other use of the material that constitutes the Autoconf Macro.
+#
+# This special exception to the GPL applies to versions of the Autoconf
+# Macro released by the Autoconf Archive. When you make and distribute a
+# modified version of the Autoconf Macro, you may extend this special
+# exception to the GPL to apply to your modified version as well.
+
+#serial 9
+
+AU_ALIAS([AC_PROG_JAVA_WORKS], [AX_PROG_JAVA_WORKS])
+AC_DEFUN([AX_PROG_JAVA_WORKS], [
+AC_PATH_PROG(UUDECODE, uudecode, [no])
+if test x$UUDECODE != xno; then
+AC_CACHE_CHECK([if uudecode can decode base 64 file], ac_cv_prog_uudecode_base64, [
+dnl /**
+dnl * Test.java: used to test if java compiler works.
+dnl */
+dnl public class Test
+dnl {
+dnl
+dnl public static void
+dnl main( String[] argv )
+dnl {
+dnl System.exit (0);
+dnl }
+dnl
+dnl }
+cat << \EOF > Test.uue
+begin-base64 644 Test.class
+yv66vgADAC0AFQcAAgEABFRlc3QHAAQBABBqYXZhL2xhbmcvT2JqZWN0AQAE
+bWFpbgEAFihbTGphdmEvbGFuZy9TdHJpbmc7KVYBAARDb2RlAQAPTGluZU51
+bWJlclRhYmxlDAAKAAsBAARleGl0AQAEKEkpVgoADQAJBwAOAQAQamF2YS9s
+YW5nL1N5c3RlbQEABjxpbml0PgEAAygpVgwADwAQCgADABEBAApTb3VyY2VG
+aWxlAQAJVGVzdC5qYXZhACEAAQADAAAAAAACAAkABQAGAAEABwAAACEAAQAB
+AAAABQO4AAyxAAAAAQAIAAAACgACAAAACgAEAAsAAQAPABAAAQAHAAAAIQAB
+AAEAAAAFKrcAErEAAAABAAgAAAAKAAIAAAAEAAQABAABABMAAAACABQ=
+====
+EOF
+if $UUDECODE Test.uue; then
+ ac_cv_prog_uudecode_base64=yes
+else
+ echo "configure: __oline__: uudecode had trouble decoding base 64 file 'Test.uue'" >&AS_MESSAGE_LOG_FD
+ echo "configure: failed file was:" >&AS_MESSAGE_LOG_FD
+ cat Test.uue >&AS_MESSAGE_LOG_FD
+ ac_cv_prog_uudecode_base64=no
+fi
+rm -f Test.uue])
+fi
+if test x$ac_cv_prog_uudecode_base64 != xyes; then
+ rm -f Test.class
+ AC_MSG_WARN([I have to compile Test.class from scratch])
+ if test x$ac_cv_prog_javac_works = xno; then
+ AC_MSG_ERROR([Cannot compile java source. $JAVAC does not work properly])
+ fi
+ if test x$ac_cv_prog_javac_works = x; then
+ AX_PROG_JAVAC
+ fi
+fi
+AC_CACHE_CHECK(if $JAVA works, ac_cv_prog_java_works, [
+JAVA_TEST=Test.java
+CLASS_TEST=Test.class
+TEST=Test
+changequote(, )dnl
+cat << \EOF > $JAVA_TEST
+/* [#]line __oline__ "configure" */
+public class Test {
+public static void main (String args[]) {
+ System.exit (0);
+} }
+EOF
+changequote([, ])dnl
+if test x$ac_cv_prog_uudecode_base64 != xyes; then
+ if AC_TRY_COMMAND($JAVAC $JAVACFLAGS $JAVA_TEST) && test -s $CLASS_TEST; then
+ :
+ else
+ echo "configure: failed program was:" >&AS_MESSAGE_LOG_FD
+ cat $JAVA_TEST >&AS_MESSAGE_LOG_FD
+ AC_MSG_ERROR(The Java compiler $JAVAC failed (see config.log, check the CLASSPATH?))
+ fi
+fi
+if AC_TRY_COMMAND($JAVA -classpath . $JAVAFLAGS $TEST) >/dev/null 2>&1; then
+ ac_cv_prog_java_works=yes
+else
+ echo "configure: failed program was:" >&AS_MESSAGE_LOG_FD
+ cat $JAVA_TEST >&AS_MESSAGE_LOG_FD
+ AC_MSG_ERROR(The Java VM $JAVA failed (see config.log, check the CLASSPATH?))
+fi
+rm -fr $JAVA_TEST $CLASS_TEST Test.uue
+])
+AC_PROVIDE([$0])dnl
+]
+)
diff --git a/m4/ax_prog_javac.m4 b/m4/ax_prog_javac.m4
new file mode 100644
index 0000000..d061243
--- /dev/null
+++ b/m4/ax_prog_javac.m4
@@ -0,0 +1,79 @@
+# ===========================================================================
+# http://www.gnu.org/software/autoconf-archive/ax_prog_javac.html
+# ===========================================================================
+#
+# SYNOPSIS
+#
+# AX_PROG_JAVAC
+#
+# DESCRIPTION
+#
+# AX_PROG_JAVAC tests an existing Java compiler. It uses the environment
+# variable JAVAC then tests in sequence various common Java compilers. For
+# political reasons, it starts with the free ones.
+#
+# If you want to force a specific compiler:
+#
+# - at the configure.in level, set JAVAC=yourcompiler before calling
+# AX_PROG_JAVAC
+#
+# - at the configure level, setenv JAVAC
+#
+# You can use the JAVAC variable in your Makefile.in, with @JAVAC@.
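+#
+# For example, in Makefile.in (the class name is illustrative; @JAVACFLAGS@
+# assumes AX_JAVA_OPTIONS is also used):
+#
+#   Main.class: Main.java
+#           @JAVAC@ @JAVACFLAGS@ Main.java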
+#
+# *Warning*: its success or failure can depend on a proper setting of the
+# CLASSPATH env. variable.
+#
+# TODO: allow excluding compilers (rationale: most Java programs cannot
+# compile with some compilers like guavac).
+#
+# Note: This is part of the set of autoconf M4 macros for Java programs.
+# It is VERY IMPORTANT that you download the whole set; some macros depend
+# on others. Unfortunately, the autoconf archive does not support the
+# concept of a set of macros, so I had to break it up for submission. The
+# general documentation, as well as the sample configure.in, is included
+# in the AX_PROG_JAVA macro.
+#
+# LICENSE
+#
+# Copyright (c) 2008 Stephane Bortzmeyer <bortzmeyer at pasteur.fr>
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the
+# Free Software Foundation; either version 2 of the License, or (at your
+# option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
+# Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# As a special exception, the respective Autoconf Macro's copyright owner
+# gives unlimited permission to copy, distribute and modify the configure
+# scripts that are the output of Autoconf when processing the Macro. You
+# need not follow the terms of the GNU General Public License when using
+# or distributing such scripts, even though portions of the text of the
+# Macro appear in them. The GNU General Public License (GPL) does govern
+# all other use of the material that constitutes the Autoconf Macro.
+#
+# This special exception to the GPL applies to versions of the Autoconf
+# Macro released by the Autoconf Archive. When you make and distribute a
+# modified version of the Autoconf Macro, you may extend this special
+# exception to the GPL to apply to your modified version as well.
+
+#serial 7
+
+AU_ALIAS([AC_PROG_JAVAC], [AX_PROG_JAVAC])
+AC_DEFUN([AX_PROG_JAVAC],[
+m4_define([m4_ax_prog_javac_list],["gcj -C" guavac jikes javac])dnl
+AS_IF([test "x$JAVAPREFIX" = x],
+ [test "x$JAVAC" = x && AC_CHECK_PROGS([JAVAC], [m4_ax_prog_javac_list])],
+ [test "x$JAVAC" = x && AC_CHECK_PROGS([JAVAC], [m4_ax_prog_javac_list], [], [$JAVAPREFIX/bin])])
+m4_undefine([m4_ax_prog_javac_list])dnl
+test "x$JAVAC" = x && AC_MSG_ERROR([no acceptable Java compiler found in \$PATH])
+AX_PROG_JAVAC_WORKS
+AC_PROVIDE([$0])dnl
+])
diff --git a/m4/ax_prog_javac_works.m4 b/m4/ax_prog_javac_works.m4
new file mode 100644
index 0000000..7dfa1e3
--- /dev/null
+++ b/m4/ax_prog_javac_works.m4
@@ -0,0 +1,72 @@
+# ===========================================================================
+# http://www.gnu.org/software/autoconf-archive/ax_prog_javac_works.html
+# ===========================================================================
+#
+# SYNOPSIS
+#
+# AX_PROG_JAVAC_WORKS
+#
+# DESCRIPTION
+#
+# Internal use ONLY.
+#
+# Note: This is part of the set of autoconf M4 macros for Java programs.
+# It is VERY IMPORTANT that you download the whole set; some macros depend
+# on others. Unfortunately, the autoconf archive does not support the
+# concept of a set of macros, so I had to break it up for submission. The
+# general documentation, as well as the sample configure.in, is included
+# in the AX_PROG_JAVA macro.
+#
+# LICENSE
+#
+# Copyright (c) 2008 Stephane Bortzmeyer <bortzmeyer at pasteur.fr>
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the
+# Free Software Foundation; either version 2 of the License, or (at your
+# option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
+# Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# As a special exception, the respective Autoconf Macro's copyright owner
+# gives unlimited permission to copy, distribute and modify the configure
+# scripts that are the output of Autoconf when processing the Macro. You
+# need not follow the terms of the GNU General Public License when using
+# or distributing such scripts, even though portions of the text of the
+# Macro appear in them. The GNU General Public License (GPL) does govern
+# all other use of the material that constitutes the Autoconf Macro.
+#
+# This special exception to the GPL applies to versions of the Autoconf
+# Macro released by the Autoconf Archive. When you make and distribute a
+# modified version of the Autoconf Macro, you may extend this special
+# exception to the GPL to apply to your modified version as well.
+
+#serial 6
+
+AU_ALIAS([AC_PROG_JAVAC_WORKS], [AX_PROG_JAVAC_WORKS])
+AC_DEFUN([AX_PROG_JAVAC_WORKS],[
+AC_CACHE_CHECK([if $JAVAC works], ac_cv_prog_javac_works, [
+JAVA_TEST=Test.java
+CLASS_TEST=Test.class
+cat << \EOF > $JAVA_TEST
+/* [#]line __oline__ "configure" */
+public class Test {
+}
+EOF
+if AC_TRY_COMMAND($JAVAC $JAVACFLAGS $JAVA_TEST) >/dev/null 2>&1; then
+ ac_cv_prog_javac_works=yes
+else
+ AC_MSG_ERROR([The Java compiler $JAVAC failed (see config.log, check the CLASSPATH?)])
+ echo "configure: failed program was:" >&AS_MESSAGE_LOG_FD
+ cat $JAVA_TEST >&AS_MESSAGE_LOG_FD
+fi
+rm -f $JAVA_TEST $CLASS_TEST
+])
+AC_PROVIDE([$0])dnl
+])
diff --git a/m4/ax_prog_javadoc.m4 b/m4/ax_prog_javadoc.m4
new file mode 100644
index 0000000..bcb6045
--- /dev/null
+++ b/m4/ax_prog_javadoc.m4
@@ -0,0 +1,50 @@
+# ===========================================================================
+# http://www.gnu.org/software/autoconf-archive/ax_prog_javadoc.html
+# ===========================================================================
+#
+# SYNOPSIS
+#
+# AX_PROG_JAVADOC
+#
+# DESCRIPTION
+#
+# AX_PROG_JAVADOC tests for an existing javadoc generator. It uses the
+# environment variable JAVADOC, then tests in sequence various common
+# javadoc generators.
+#
+# If you want to force a specific compiler:
+#
+# - at the configure.in level, set JAVADOC=yourgenerator before calling
+# AX_PROG_JAVADOC
+#
+# - at the configure level, setenv JAVADOC
+#
+# You can use the JAVADOC variable in your Makefile.in, with @JAVADOC@.
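+#
+# For example, in Makefile.in (the package name and output directory are
+# illustrative):
+#
+#   doc:
+#           @JAVADOC@ -d apidocs -sourcepath @srcdir@ mypackage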
+#
+# Note: This macro depends on the autoconf M4 macros for Java programs. It
+# is VERY IMPORTANT that you download that whole set; some macros depend
+# on others. Unfortunately, the autoconf archive does not support the
+# concept of a set of macros, so I had to break it up for submission.
+#
+# The general documentation of those macros, as well as the sample
+# configure.in, is included in the AX_PROG_JAVA macro.
+#
+# LICENSE
+#
+# Copyright (c) 2008 Egon Willighagen <e.willighagen at science.ru.nl>
+#
+# Copying and distribution of this file, with or without modification, are
+# permitted in any medium without royalty provided the copyright notice
+# and this notice are preserved. This file is offered as-is, without any
+# warranty.
+
+#serial 8
+
+AU_ALIAS([AC_PROG_JAVADOC], [AX_PROG_JAVADOC])
+AC_DEFUN([AX_PROG_JAVADOC],[
+AS_IF([test "x$JAVAPREFIX" = x],
+ [test "x$JAVADOC" = x && AC_CHECK_PROGS([JAVADOC], [javadoc])],
+ [test "x$JAVADOC" = x && AC_CHECK_PROGS([JAVADOC], [javadoc], [], [$JAVAPREFIX/bin])])
+test "x$JAVADOC" = x && AC_MSG_ERROR([no acceptable javadoc generator found in \$PATH])
+AC_PROVIDE([$0])dnl
+])
diff --git a/m4/ax_prog_javah.m4 b/m4/ax_prog_javah.m4
new file mode 100644
index 0000000..cefc616
--- /dev/null
+++ b/m4/ax_prog_javah.m4
@@ -0,0 +1,64 @@
+# ===========================================================================
+# http://www.gnu.org/software/autoconf-archive/ax_prog_javah.html
+# ===========================================================================
+#
+# SYNOPSIS
+#
+# AX_PROG_JAVAH
+#
+# DESCRIPTION
+#
+# AX_PROG_JAVAH tests the availability of the javah header generator and
+# looks for the jni.h header file. If available, JAVAH is set to the full
+# path of javah and CPPFLAGS is updated accordingly.
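+#
+# A minimal sketch (the class and header names are illustrative):
+#
+#   configure.ac:   AX_PROG_JAVAH
+#
+#   Makefile.in:    HelloJNI.h: HelloJNI.class
+#                           @JAVAH@ -jni -o HelloJNI.h HelloJNI
+#
+# JNI C stubs can then be compiled with the CPPFLAGS set up by the macro.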
+#
+# LICENSE
+#
+# Copyright (c) 2008 Luc Maisonobe <luc at spaceroots.org>
+#
+# Copying and distribution of this file, with or without modification, are
+# permitted in any medium without royalty provided the copyright notice
+# and this notice are preserved. This file is offered as-is, without any
+# warranty.
+
+#serial 8
+
+AU_ALIAS([AC_PROG_JAVAH], [AX_PROG_JAVAH])
+AC_DEFUN([AX_PROG_JAVAH],[
+AC_REQUIRE([AC_CANONICAL_BUILD])dnl
+AC_REQUIRE([AC_PROG_CPP])dnl
+AC_PATH_PROG(JAVAH,javah)
+AS_IF([test -n "$ac_cv_path_JAVAH"],
+ [
+ AC_TRY_CPP([#include <jni.h>],,[
+ ac_save_CPPFLAGS="$CPPFLAGS"
+ _ACJAVAH_FOLLOW_SYMLINKS("$ac_cv_path_JAVAH")
+ ax_prog_javah_bin_dir=`AS_DIRNAME([$_ACJAVAH_FOLLOWED])`
+ ac_dir="`AS_DIRNAME([$ax_prog_javah_bin_dir])`/include"
+ AS_CASE([$build_os],
+ [cygwin*],
+ [ac_machdep=win32],
+ [ac_machdep=`AS_ECHO($build_os) | sed 's,[[-0-9]].*,,'`])
+ CPPFLAGS="$ac_save_CPPFLAGS -I$ac_dir -I$ac_dir/$ac_machdep"
+ AC_TRY_CPP([#include <jni.h>],
+ ac_save_CPPFLAGS="$CPPFLAGS",
+ AC_MSG_WARN([unable to include <jni.h>]))
+ CPPFLAGS="$ac_save_CPPFLAGS"])
+ ])
+])
+
+AC_DEFUN([_ACJAVAH_FOLLOW_SYMLINKS],[
+# find the include directory relative to the javac executable
+_cur="$1"
+while ls -ld "$_cur" 2>/dev/null | grep " -> " >/dev/null; do
+ AC_MSG_CHECKING([symlink for $_cur])
+ _slink=`ls -ld "$_cur" | sed 's/.* -> //'`
+ case "$_slink" in
+ /*) _cur="$_slink";;
+ # 'X' avoids triggering unwanted echo options.
+ *) _cur=`echo "X$_cur" | sed -e 's/^X//' -e 's:[[^/]]*$::'`"$_slink";;
+ esac
+ AC_MSG_RESULT([$_cur])
+done
+_ACJAVAH_FOLLOWED="$_cur"
+])
diff --git a/m4/ax_try_compile_java.m4 b/m4/ax_try_compile_java.m4
new file mode 100644
index 0000000..a8ed6b2
--- /dev/null
+++ b/m4/ax_try_compile_java.m4
@@ -0,0 +1,55 @@
+# ===========================================================================
+# http://www.gnu.org/software/autoconf-archive/ax_try_compile_java.html
+# ===========================================================================
+#
+# SYNOPSIS
+#
+# AX_TRY_COMPILE_JAVA
+#
+# DESCRIPTION
+#
+# AX_TRY_COMPILE_JAVA attempts to compile user-given source.
+#
+# *Warning*: its success or failure can depend on a proper setting of the
+# CLASSPATH env. variable.
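+#
+# A minimal sketch (the class checked for is illustrative):
+#
+#   AX_TRY_COMPILE_JAVA([java.util.zip.Deflater],
+#                       [Deflater d = new Deflater();],
+#                       [AC_MSG_RESULT([found java.util.zip])],
+#                       [AC_MSG_ERROR([cannot compile against java.util.zip])])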
+#
+# Note: This is part of the set of autoconf M4 macros for Java programs.
+# It is VERY IMPORTANT that you download the whole set; some macros depend
+# on others. Unfortunately, the autoconf archive does not support the
+# concept of a set of macros, so I had to break it up for submission. The
+# general documentation, as well as the sample configure.in, is included
+# in the AX_PROG_JAVA macro.
+#
+# LICENSE
+#
+# Copyright (c) 2008 Devin Weaver <ktohg at tritarget.com>
+#
+# Copying and distribution of this file, with or without modification, are
+# permitted in any medium without royalty provided the copyright notice
+# and this notice are preserved. This file is offered as-is, without any
+# warranty.
+
+#serial 8
+
+AU_ALIAS([AC_TRY_COMPILE_JAVA], [AX_TRY_COMPILE_JAVA])
+AC_DEFUN([AX_TRY_COMPILE_JAVA],[
+AC_REQUIRE([AX_PROG_JAVAC])dnl
+cat << \EOF > Test.java
+/* [#]line __oline__ "configure" */
+ifelse([$1], , , [import $1;])
+public class Test {
+[$2]
+}
+EOF
+if AC_TRY_COMMAND($JAVAC $JAVACFLAGS Test.java) && test -s Test.class
+then
+dnl Don't remove the temporary files here, so they can be examined.
+ ifelse([$3], , :, [$3])
+else
+ echo "configure: failed program was:" >&AS_MESSAGE_LOG_FD
+ cat Test.java >&AS_MESSAGE_LOG_FD
+ifelse([$4], , , [ rm -fr Test.java Test.class
+ $4
+])dnl
+fi
+rm -fr Test.java Test.class])
diff --git a/m4/ax_try_run_java.m4 b/m4/ax_try_run_java.m4
new file mode 100644
index 0000000..c680f03
--- /dev/null
+++ b/m4/ax_try_run_java.m4
@@ -0,0 +1,56 @@
+# ===========================================================================
+# http://www.gnu.org/software/autoconf-archive/ax_try_run_java.html
+# ===========================================================================
+#
+# SYNOPSIS
+#
+# AX_TRY_RUN_JAVA
+#
+# DESCRIPTION
+#
+# AX_TRY_RUN_JAVA attempts to compile and run user-given source.
+#
+# *Warning*: its success or failure can depend on a proper setting of the
+# CLASSPATH env. variable.
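+#
+# A minimal sketch (the run-time check below is illustrative):
+#
+#   AX_TRY_RUN_JAVA([],
+#     [public static void main(String... args) {
+#        new java.util.zip.Deflater();
+#        System.exit(0);
+#      }],
+#     [AC_MSG_RESULT([java.util.zip is usable at run time])],
+#     [AC_MSG_ERROR([run-time check for java.util.zip failed])])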
+#
+# Note: This is part of the set of autoconf M4 macros for Java programs.
+# It is VERY IMPORTANT that you download the whole set; some macros depend
+# on others. Unfortunately, the autoconf archive does not support the
+# concept of a set of macros, so I had to break it up for submission. The
+# general documentation, as well as the sample configure.in, is included
+# in the AX_PROG_JAVA macro.
+#
+# LICENSE
+#
+# Copyright (c) 2008 Devin Weaver <ktohg at tritarget.com>
+#
+# Copying and distribution of this file, with or without modification, are
+# permitted in any medium without royalty provided the copyright notice
+# and this notice are preserved. This file is offered as-is, without any
+# warranty.
+
+#serial 2
+
+AU_ALIAS([AC_TRY_RUN_JAVA], [AX_TRY_RUN_JAVA])
+AC_DEFUN([AX_TRY_RUN_JAVA],[
+AC_REQUIRE([AX_PROG_JAVAC])dnl
+AC_REQUIRE([AX_PROG_JAVA])dnl
+cat << \EOF > Test.java
+/* [#]line __oline__ "configure" */
+ifelse([$1], , , [import $1;])
+public class Test {
+[$2]
+}
+EOF
+if AC_TRY_COMMAND($JAVAC $JAVACFLAGS Test.java) && test -s Test.class && ($JAVA $JAVAFLAGS Test; exit) 2>/dev/null
+then
+dnl Don't remove the temporary files here, so they can be examined.
+ ifelse([$3], , :, [$3])
+else
+ echo "configure: failed program was:" >&AS_MESSAGE_LOG_FD
+ cat Test.java >&AS_MESSAGE_LOG_FD
+ifelse([$4], , , [ rm -fr Test.java Test.class
+ $4
+])dnl
+fi
+rm -fr Test.java Test.class])
diff --git a/man/Makefile.in b/man/Makefile.in
index 710519e..dbafc3a 100644
--- a/man/Makefile.in
+++ b/man/Makefile.in
@@ -81,7 +81,20 @@ DIST_COMMON = $(top_srcdir)/config/commence.am $(srcdir)/Makefile.in \
$(srcdir)/Makefile.am $(top_srcdir)/bin/mkinstalldirs
subdir = man
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
-am__aclocal_m4_deps = $(top_srcdir)/configure.ac
+am__aclocal_m4_deps = $(top_srcdir)/m4/ax_check_class.m4 \
+ $(top_srcdir)/m4/ax_check_classpath.m4 \
+ $(top_srcdir)/m4/ax_check_java_home.m4 \
+ $(top_srcdir)/m4/ax_check_junit.m4 \
+ $(top_srcdir)/m4/ax_java_options.m4 \
+ $(top_srcdir)/m4/ax_jni_include_dir.m4 \
+ $(top_srcdir)/m4/ax_prog_jar.m4 \
+ $(top_srcdir)/m4/ax_prog_java.m4 \
+ $(top_srcdir)/m4/ax_prog_java_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javac.m4 \
+ $(top_srcdir)/m4/ax_prog_javac_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javadoc.m4 \
+ $(top_srcdir)/m4/ax_try_compile_java.m4 \
+ $(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
mkinstalldirs = $(SHELL) $(top_srcdir)/bin/mkinstalldirs
@@ -182,12 +195,27 @@ FFLAGS = @FFLAGS@
FGREP = @FGREP@
FLIBS = @FLIBS@
GREP = @GREP@
+H4_CLASSPATH = @H4_CLASSPATH@
+H4_JAVACFLAGS = @H4_JAVACFLAGS@
+H4_JAVAFLAGS = @H4_JAVAFLAGS@
H4_VERSION = @H4_VERSION@
+HDF_JAVA = @HDF_JAVA@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
INSTALL_PROGRAM = @INSTALL_PROGRAM@
INSTALL_SCRIPT = @INSTALL_SCRIPT@
INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+JAR = @JAR@
+JAVA = @JAVA@
+JAVAC = @JAVAC@
+JAVACFLAGS = @JAVACFLAGS@
+JAVADOC = @JAVADOC@
+JAVAFLAGS = @JAVAFLAGS@
+JAVAPREFIX = @JAVAPREFIX@
+JAVA_JUNIT = @JAVA_JUNIT@
+JAVA_PATH_NAME = @JAVA_PATH_NAME@
+JNIFLAGS = @JNIFLAGS@
+JUNIT = @JUNIT@
LD = @LD@
LDFLAGS = @LDFLAGS@
LEX = @LEX@
@@ -230,11 +258,14 @@ STRIP = @STRIP@
SZIP_HAS_ENCODER = @SZIP_HAS_ENCODER@
SZIP_INFO = @SZIP_INFO@
TBL = @TBL@
+TESTS_JUNIT = @TESTS_JUNIT@
TEST_FORTRAN_NETCDF = @TEST_FORTRAN_NETCDF@
UNAME_INFO = @UNAME_INFO@
USE_COMP_SZIP = @USE_COMP_SZIP@
+UUDECODE = @UUDECODE@
VERSION = @VERSION@
YACC = @YACC@
+_ACJNI_JAVAC = @_ACJNI_JAVAC@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/man/hdf.1 b/man/hdf.1
index 42a58d0..3c1b7d9 100644
--- a/man/hdf.1
+++ b/man/hdf.1
@@ -1,5 +1,5 @@
-.\" $Id: hdf.1 6194 2015-02-05 14:17:12Z bmribler $
-.TH HDF 1 "February 2015" "THG HDF 4.2.11"
+.\" $Id: hdf.1 6473 2016-06-24 21:38:14Z bmribler $
+.TH HDF 1 "June 2016" "THG HDF 4.2.12"
.SH NAME
hdf \- Hierarchical Data Format library
.SH SYNOPSIS
@@ -135,7 +135,7 @@ USA
www.hdfgroup.org
.SH VERSION
-4.2.11
+4.2.12
.SH LICENSE & SOURCE AVAILABILITY
Copyright by The HDF Group.
.sp 0
diff --git a/mfhdf/CMakeLists.txt b/mfhdf/CMakeLists.txt
index c04d80a..16e5164 100644
--- a/mfhdf/CMakeLists.txt
+++ b/mfhdf/CMakeLists.txt
@@ -1,4 +1,4 @@
-cmake_minimum_required (VERSION 2.8.10)
+cmake_minimum_required (VERSION 3.1.0)
PROJECT (HDF4_MFHDF)
if (CYGWIN)
diff --git a/mfhdf/Makefile.in b/mfhdf/Makefile.in
index 9357b62..d53ecac 100644
--- a/mfhdf/Makefile.in
+++ b/mfhdf/Makefile.in
@@ -82,7 +82,20 @@ DIST_COMMON = $(top_srcdir)/config/commence.am $(srcdir)/Makefile.in \
THANKS
subdir = mfhdf
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
-am__aclocal_m4_deps = $(top_srcdir)/configure.ac
+am__aclocal_m4_deps = $(top_srcdir)/m4/ax_check_class.m4 \
+ $(top_srcdir)/m4/ax_check_classpath.m4 \
+ $(top_srcdir)/m4/ax_check_java_home.m4 \
+ $(top_srcdir)/m4/ax_check_junit.m4 \
+ $(top_srcdir)/m4/ax_java_options.m4 \
+ $(top_srcdir)/m4/ax_jni_include_dir.m4 \
+ $(top_srcdir)/m4/ax_prog_jar.m4 \
+ $(top_srcdir)/m4/ax_prog_java.m4 \
+ $(top_srcdir)/m4/ax_prog_java_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javac.m4 \
+ $(top_srcdir)/m4/ax_prog_javac_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javadoc.m4 \
+ $(top_srcdir)/m4/ax_try_compile_java.m4 \
+ $(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
mkinstalldirs = $(SHELL) $(top_srcdir)/bin/mkinstalldirs
@@ -211,12 +224,27 @@ FFLAGS = @FFLAGS@
FGREP = @FGREP@
FLIBS = @FLIBS@
GREP = @GREP@
+H4_CLASSPATH = @H4_CLASSPATH@
+H4_JAVACFLAGS = @H4_JAVACFLAGS@
+H4_JAVAFLAGS = @H4_JAVAFLAGS@
H4_VERSION = @H4_VERSION@
+HDF_JAVA = @HDF_JAVA@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
INSTALL_PROGRAM = @INSTALL_PROGRAM@
INSTALL_SCRIPT = @INSTALL_SCRIPT@
INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+JAR = @JAR@
+JAVA = @JAVA@
+JAVAC = @JAVAC@
+JAVACFLAGS = @JAVACFLAGS@
+JAVADOC = @JAVADOC@
+JAVAFLAGS = @JAVAFLAGS@
+JAVAPREFIX = @JAVAPREFIX@
+JAVA_JUNIT = @JAVA_JUNIT@
+JAVA_PATH_NAME = @JAVA_PATH_NAME@
+JNIFLAGS = @JNIFLAGS@
+JUNIT = @JUNIT@
LD = @LD@
LDFLAGS = @LDFLAGS@
LEX = @LEX@
@@ -259,11 +287,14 @@ STRIP = @STRIP@
SZIP_HAS_ENCODER = @SZIP_HAS_ENCODER@
SZIP_INFO = @SZIP_INFO@
TBL = @TBL@
+TESTS_JUNIT = @TESTS_JUNIT@
TEST_FORTRAN_NETCDF = @TEST_FORTRAN_NETCDF@
UNAME_INFO = @UNAME_INFO@
USE_COMP_SZIP = @USE_COMP_SZIP@
+UUDECODE = @UUDECODE@
VERSION = @VERSION@
YACC = @YACC@
+_ACJNI_JAVAC = @_ACJNI_JAVAC@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/mfhdf/dumper/CMakeLists.txt b/mfhdf/dumper/CMakeLists.txt
index 57448ac..cf7dcf4 100644
--- a/mfhdf/dumper/CMakeLists.txt
+++ b/mfhdf/dumper/CMakeLists.txt
@@ -1,5 +1,5 @@
-cmake_minimum_required (VERSION 2.8.10)
-PROJECT (HDF4_MFHDF_DUMPER)
+cmake_minimum_required (VERSION 3.1.0)
+project (HDF4_MFHDF_DUMPER)
INCLUDE_DIRECTORIES (${HDF4_HDFSOURCE_DIR})
INCLUDE_DIRECTORIES (${HDF4_MFHDFSOURCE_DIR})
@@ -18,8 +18,8 @@ set (hdp_SRCS
${HDF4_MFHDF_DUMPER_SOURCE_DIR}/hdp_vg.c
${HDF4_MFHDF_DUMPER_SOURCE_DIR}/show.c
)
-
-ADD_EXECUTABLE (hdp ${hdp_SRCS})
+
+add_executable (hdp ${hdp_SRCS})
if (WIN32)
add_definitions (-DDOS_FS)
endif (WIN32)
@@ -30,9 +30,9 @@ if (HDF4_BUILD_XDR_LIB)
INCLUDE_DIRECTORIES (${HDF4_MFHDF_XDR_DIR})
target_link_libraries (hdp ${HDF4_MF_XDR_LIB_TARGET})
endif (HDF4_BUILD_XDR_LIB)
-TARGET_C_PROPERTIES (hdp " " " ")
+TARGET_C_PROPERTIES (hdp STATIC " " " ")
target_link_libraries (hdp ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
-TARGET_NAMING (hdp ${LIB_TYPE})
+TARGET_NAMING (hdp STATIC)
if (WIN32)
set_target_properties (hdp PROPERTIES LINK_FLAGS "/STACK:10000000")
endif (WIN32)
@@ -56,8 +56,5 @@ INSTALL_PROGRAM_PDB (hdp ${HDF4_INSTALL_TOOLS_BIN_DIR} toolsapplications)
INSTALL (
TARGETS
hdp
- RUNTIME DESTINATION
- ${HDF4_INSTALL_TOOLS_BIN_DIR}
- COMPONENT
- toolsapplications
+ RUNTIME DESTINATION ${HDF4_INSTALL_TOOLS_BIN_DIR} COMPONENT toolsapplications
)
diff --git a/mfhdf/dumper/Makefile.in b/mfhdf/dumper/Makefile.in
index 05f5018..eec9184 100644
--- a/mfhdf/dumper/Makefile.in
+++ b/mfhdf/dumper/Makefile.in
@@ -90,7 +90,20 @@ bin_PROGRAMS = hdp$(EXEEXT)
TESTS = $(TEST_SCRIPT)
subdir = mfhdf/dumper
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
-am__aclocal_m4_deps = $(top_srcdir)/configure.ac
+am__aclocal_m4_deps = $(top_srcdir)/m4/ax_check_class.m4 \
+ $(top_srcdir)/m4/ax_check_classpath.m4 \
+ $(top_srcdir)/m4/ax_check_java_home.m4 \
+ $(top_srcdir)/m4/ax_check_junit.m4 \
+ $(top_srcdir)/m4/ax_java_options.m4 \
+ $(top_srcdir)/m4/ax_jni_include_dir.m4 \
+ $(top_srcdir)/m4/ax_prog_jar.m4 \
+ $(top_srcdir)/m4/ax_prog_java.m4 \
+ $(top_srcdir)/m4/ax_prog_java_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javac.m4 \
+ $(top_srcdir)/m4/ax_prog_javac_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javadoc.m4 \
+ $(top_srcdir)/m4/ax_try_compile_java.m4 \
+ $(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
mkinstalldirs = $(SHELL) $(top_srcdir)/bin/mkinstalldirs
@@ -414,12 +427,27 @@ FFLAGS = @FFLAGS@
FGREP = @FGREP@
FLIBS = @FLIBS@
GREP = @GREP@
+H4_CLASSPATH = @H4_CLASSPATH@
+H4_JAVACFLAGS = @H4_JAVACFLAGS@
+H4_JAVAFLAGS = @H4_JAVAFLAGS@
H4_VERSION = @H4_VERSION@
+HDF_JAVA = @HDF_JAVA@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
INSTALL_PROGRAM = @INSTALL_PROGRAM@
INSTALL_SCRIPT = @INSTALL_SCRIPT@
INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+JAR = @JAR@
+JAVA = @JAVA@
+JAVAC = @JAVAC@
+JAVACFLAGS = @JAVACFLAGS@
+JAVADOC = @JAVADOC@
+JAVAFLAGS = @JAVAFLAGS@
+JAVAPREFIX = @JAVAPREFIX@
+JAVA_JUNIT = @JAVA_JUNIT@
+JAVA_PATH_NAME = @JAVA_PATH_NAME@
+JNIFLAGS = @JNIFLAGS@
+JUNIT = @JUNIT@
LD = @LD@
LDFLAGS = @LDFLAGS@
LEX = @LEX@
@@ -462,11 +490,14 @@ STRIP = @STRIP@
SZIP_HAS_ENCODER = @SZIP_HAS_ENCODER@
SZIP_INFO = @SZIP_INFO@
TBL = @TBL@
+TESTS_JUNIT = @TESTS_JUNIT@
TEST_FORTRAN_NETCDF = @TEST_FORTRAN_NETCDF@
UNAME_INFO = @UNAME_INFO@
USE_COMP_SZIP = @USE_COMP_SZIP@
+UUDECODE = @UUDECODE@
VERSION = @VERSION@
YACC = @YACC@
+_ACJNI_JAVAC = @_ACJNI_JAVAC@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/mfhdf/dumper/hdp.c b/mfhdf/dumper/hdp.c
index 0730754..40e23e0 100644
--- a/mfhdf/dumper/hdp.c
+++ b/mfhdf/dumper/hdp.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6157 $";
-#endif
-
-/* $Id: hdp.c 6157 2014-12-08 05:25:17Z bmribler $ */
+/* $Id: hdp.c 6357 2016-05-13 05:00:06Z bmribler $ */
#define HDP_MASTER
#define VSET_INTERFACE
#include "hdp.h"
diff --git a/mfhdf/dumper/hdp.h b/mfhdf/dumper/hdp.h
index 2b5a3bc..0b24d1f 100644
--- a/mfhdf/dumper/hdp.h
+++ b/mfhdf/dumper/hdp.h
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 5706 $";
-#endif
-
-/* $Id: hdp.h 5706 2011-11-01 18:23:02Z bmribler $ */
+/* $Id: hdp.h 6357 2016-05-13 05:00:06Z bmribler $ */
#ifndef __HDP_H
#define __HDP_H
diff --git a/mfhdf/dumper/hdp_dump.c b/mfhdf/dumper/hdp_dump.c
index aafe9aa..792d332 100644
--- a/mfhdf/dumper/hdp_dump.c
+++ b/mfhdf/dumper/hdp_dump.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "$Revision: 5706 $";
-#endif
-
-/* $Id: hdp_dump.c 5706 2011-11-01 18:23:02Z bmribler $ */
+/* $Id: hdp_dump.c 6357 2016-05-13 05:00:06Z bmribler $ */
#include <stdio.h>
#include "mfhdf.h"
diff --git a/mfhdf/dumper/hdp_gr.c b/mfhdf/dumper/hdp_gr.c
index c56052d..c118284 100644
--- a/mfhdf/dumper/hdp_gr.c
+++ b/mfhdf/dumper/hdp_gr.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)Revision";
-#endif
-
-/* $Id: hdp_gr.c 5831 2012-07-20 07:03:42Z bmribler $ */
+/* $Id: hdp_gr.c 6357 2016-05-13 05:00:06Z bmribler $ */
#include <stdio.h>
#include "mfhdf.h"
diff --git a/mfhdf/dumper/hdp_list.c b/mfhdf/dumper/hdp_list.c
index 7e038bc..3e106ec 100644
--- a/mfhdf/dumper/hdp_list.c
+++ b/mfhdf/dumper/hdp_list.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 5706 $";
-#endif
-
-/* $Id: hdp_list.c 5706 2011-11-01 18:23:02Z bmribler $ */
+/* $Id: hdp_list.c 6357 2016-05-13 05:00:06Z bmribler $ */
#include "mfhdf.h"
#include "hdp.h"
diff --git a/mfhdf/dumper/hdp_rig.c b/mfhdf/dumper/hdp_rig.c
index 7f866c7..c20765e 100644
--- a/mfhdf/dumper/hdp_rig.c
+++ b/mfhdf/dumper/hdp_rig.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 5706 $";
-#endif
-
-/* $Id: hdp_rig.c 5706 2011-11-01 18:23:02Z bmribler $ */
+/* $Id: hdp_rig.c 6357 2016-05-13 05:00:06Z bmribler $ */
#include <stdio.h>
#include "mfhdf.h"
diff --git a/mfhdf/dumper/hdp_sds.c b/mfhdf/dumper/hdp_sds.c
index 60c47e2..9889c36 100644
--- a/mfhdf/dumper/hdp_sds.c
+++ b/mfhdf/dumper/hdp_sds.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)Revision";
-#endif
-
-/* $Id: hdp_sds.c 5886 2012-10-06 04:23:07Z bmribler $ */
+/* $Id: hdp_sds.c 6441 2016-06-14 21:31:36Z bmribler $ */
#include <stdio.h>
#include "mfhdf.h"
@@ -1191,6 +1187,13 @@ intn dsd(dump_info_t *dumpsds_opts,
HDstrcpy( dumpsds_opts->ifile_name, file_name ); /* record file name */
curr_arg++; /* move argument pointer forward */
+ /* HDF4 doesn't process netCDF 64-bit files */
+ if (HDisnetcdf64(file_name))
+ {
+ printf("Invalid input file: hdp cannot read a netCDF 64-bit file, %s\n", file_name );
+ continue; /* to the next file */
+ }
+
if (HDisnetcdf(file_name)) /* record if file is netCDF */
dumpsds_opts->file_type = netCDF_FILE;
else if (Hishdf(file_name)) /* record if file is HDF */
diff --git a/mfhdf/dumper/hdp_util.c b/mfhdf/dumper/hdp_util.c
index 37641f5..46a1328 100644
--- a/mfhdf/dumper/hdp_util.c
+++ b/mfhdf/dumper/hdp_util.c
@@ -11,10 +11,6 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)1.1";
-#endif
-
/* hdp_util.c,v 1.1 1994/04/18 15:49:18 georgev Exp */
#include "hdp.h"
diff --git a/mfhdf/dumper/hdp_vd.c b/mfhdf/dumper/hdp_vd.c
index c0835a5..a236289 100644
--- a/mfhdf/dumper/hdp_vd.c
+++ b/mfhdf/dumper/hdp_vd.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6043 $";
-#endif
-
-/* $Id: hdp_vd.c 6043 2014-01-21 21:09:03Z acheng $ */
+/* $Id: hdp_vd.c 6357 2016-05-13 05:00:06Z bmribler $ */
#include "hdp.h"
#ifndef MIPSEL
diff --git a/mfhdf/dumper/hdp_vg.c b/mfhdf/dumper/hdp_vg.c
index 7067b45..0df7e74 100644
--- a/mfhdf/dumper/hdp_vg.c
+++ b/mfhdf/dumper/hdp_vg.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6157 $";
-#endif
-
-/* $Id: hdp_vg.c 6157 2014-12-08 05:25:17Z bmribler $ */
+/* $Id: hdp_vg.c 6357 2016-05-13 05:00:06Z bmribler $ */
#include "hdp.h"
#ifndef MIPSEL
diff --git a/mfhdf/dumper/show.c b/mfhdf/dumper/show.c
index 84cc819..8fecd0c 100644
--- a/mfhdf/dumper/show.c
+++ b/mfhdf/dumper/show.c
@@ -13,10 +13,6 @@
/* Modified from vshow.c by Eric Tsui, 12/25/1994. */
-#ifdef RCSID
-static char *RcsId[] = "@(#)$Revision: 6043 $";
-#endif
-
#define VSET_INTERFACE
#include "hdp.h"
diff --git a/mfhdf/dumper/testfiles/Roy-64.nc b/mfhdf/dumper/testfiles/Roy-64.nc
new file mode 100644
index 0000000..460d3b6
Binary files /dev/null and b/mfhdf/dumper/testfiles/Roy-64.nc differ
diff --git a/mfhdf/dumper/testfiles/dumpsds-19.out b/mfhdf/dumper/testfiles/dumpsds-19.out
new file mode 100644
index 0000000..3c2f5af
--- /dev/null
+++ b/mfhdf/dumper/testfiles/dumpsds-19.out
@@ -0,0 +1 @@
+Invalid input file: hdp cannot read a netCDF 64-bit file, Roy-64.nc
diff --git a/mfhdf/dumper/testhdp.sh.in b/mfhdf/dumper/testhdp.sh.in
index 5e188ff..3b1b439 100644
--- a/mfhdf/dumper/testhdp.sh.in
+++ b/mfhdf/dumper/testhdp.sh.in
@@ -1,5 +1,5 @@
#! /bin/sh
-# $Id: testhdp.sh.in 6104 2014-04-25 19:39:26Z derobins $
+# $Id: testhdp.sh.in 6441 2016-06-14 21:31:36Z bmribler $
# Test scripts for hdp (dumper).
# See the USAGE function for command usage.
@@ -254,6 +254,9 @@ TEST dumpsds-17.out dumpsds -k -h -i 39,36 -n data34,data27 -r 36,37 -i 0,1 -n d
# Test 18 reads a few small datasets in a netCDF file
TEST dumpsds-18.out dumpsds -i 0,1,2 Roy.nc
+# Test 19 displays a message when the file is a netCDF 64-bit file
+TEST dumpsds-19.out dumpsds Roy-64.nc
+
else
MESG 3 "$TestName <<<SKIPPED>>>"
fi
diff --git a/mfhdf/examples/CMakeLists.txt b/mfhdf/examples/CMakeLists.txt
index 53184c1..617d46d 100644
--- a/mfhdf/examples/CMakeLists.txt
+++ b/mfhdf/examples/CMakeLists.txt
@@ -1,5 +1,5 @@
-cmake_minimum_required (VERSION 2.8.10)
-PROJECT (HDF4_MFHDF_EXAMPLES)
+cmake_minimum_required (VERSION 3.1.0)
+project (HDF4_MFHDF_EXAMPLES)
#-----------------------------------------------------------------------------
# Define Sources
@@ -24,9 +24,9 @@ set (examples
)
foreach (example ${examples})
- ADD_EXECUTABLE (mf_${example} ${HDF4_MFHDF_EXAMPLES_SOURCE_DIR}/${example}.c)
- TARGET_NAMING (mf_${example} ${LIB_TYPE})
- TARGET_C_PROPERTIES (mf_${example} " " " ")
+ add_executable (mf_${example} ${HDF4_MFHDF_EXAMPLES_SOURCE_DIR}/${example}.c)
+ TARGET_NAMING (mf_${example} STATIC)
+ TARGET_C_PROPERTIES (mf_${example} STATIC " " " ")
if (HDF4_BUILD_XDR_LIB)
target_link_libraries (mf_${example} ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS} ${HDF4_MF_XDR_LIB_TARGET})
else (HDF4_BUILD_XDR_LIB)
diff --git a/mfhdf/examples/CMakeTests.cmake b/mfhdf/examples/CMakeTests.cmake
index 0fe2ac4..c959a36 100644
--- a/mfhdf/examples/CMakeTests.cmake
+++ b/mfhdf/examples/CMakeTests.cmake
@@ -21,11 +21,11 @@ set_tests_properties (MFHDF_EXAMPLES-clearall-objects PROPERTIES LABELS ${PROJEC
set (last_test "MFHDF_EXAMPLES-clearall-objects")
foreach (example ${examples})
- add_test (NAME mftest_${example} COMMAND $<TARGET_FILE:mf_${example}>)
+ add_test (NAME MFHDF_EXAMPLES-${example} COMMAND $<TARGET_FILE:mf_${example}>)
if (NOT "${last_test}" STREQUAL "")
- set_tests_properties (mftest_${example} PROPERTIES DEPENDS ${last_test} LABELS ${PROJECT_NAME})
+ set_tests_properties (MFHDF_EXAMPLES-${example} PROPERTIES DEPENDS ${last_test} LABELS ${PROJECT_NAME})
else (NOT "${last_test}" STREQUAL "")
- set_tests_properties (mftest_${example} PROPERTIES LABELS ${PROJECT_NAME})
+ set_tests_properties (MFHDF_EXAMPLES-${example} PROPERTIES LABELS ${PROJECT_NAME})
endif (NOT "${last_test}" STREQUAL "")
- set (last_test "mftest_${example}")
+ set (last_test "MFHDF_EXAMPLES-${example}")
endforeach (example ${examples})
diff --git a/mfhdf/examples/Makefile.am b/mfhdf/examples/Makefile.am
index 7bfbacb..affa165 100644
--- a/mfhdf/examples/Makefile.am
+++ b/mfhdf/examples/Makefile.am
@@ -23,7 +23,7 @@ INSTALL_FILES = SD_create_sds.c SD_write_to_sds.c SD_write_slab.c \
SD_get_info.c SD_find_sds_by_name.c SD_set_get_dim_info.c \
SD_dimscale_vs_sds.c SD_set_attr.c SD_get_attr.c SD_chunking_example.c
-EXAMPLEDIR=$(prefix)/examples/c
+EXAMPLEDIR=${DESTDIR}$(prefix)/examples/c
# How to build programs using h4cc
$(EXTRA_PROG): $(H4CC)
diff --git a/mfhdf/examples/Makefile.in b/mfhdf/examples/Makefile.in
index 2b0fe44..b8767bb 100644
--- a/mfhdf/examples/Makefile.in
+++ b/mfhdf/examples/Makefile.in
@@ -92,7 +92,20 @@ DIST_COMMON = $(top_srcdir)/config/commence.am \
TESTS = $(TEST_PROG)
subdir = mfhdf/examples
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
-am__aclocal_m4_deps = $(top_srcdir)/configure.ac
+am__aclocal_m4_deps = $(top_srcdir)/m4/ax_check_class.m4 \
+ $(top_srcdir)/m4/ax_check_classpath.m4 \
+ $(top_srcdir)/m4/ax_check_java_home.m4 \
+ $(top_srcdir)/m4/ax_check_junit.m4 \
+ $(top_srcdir)/m4/ax_java_options.m4 \
+ $(top_srcdir)/m4/ax_jni_include_dir.m4 \
+ $(top_srcdir)/m4/ax_prog_jar.m4 \
+ $(top_srcdir)/m4/ax_prog_java.m4 \
+ $(top_srcdir)/m4/ax_prog_java_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javac.m4 \
+ $(top_srcdir)/m4/ax_prog_javac_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javadoc.m4 \
+ $(top_srcdir)/m4/ax_try_compile_java.m4 \
+ $(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
mkinstalldirs = $(SHELL) $(top_srcdir)/bin/mkinstalldirs
@@ -364,12 +377,27 @@ FFLAGS = @FFLAGS@
FGREP = @FGREP@
FLIBS = @FLIBS@
GREP = @GREP@
+H4_CLASSPATH = @H4_CLASSPATH@
+H4_JAVACFLAGS = @H4_JAVACFLAGS@
+H4_JAVAFLAGS = @H4_JAVAFLAGS@
H4_VERSION = @H4_VERSION@
+HDF_JAVA = @HDF_JAVA@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
INSTALL_PROGRAM = @INSTALL_PROGRAM@
INSTALL_SCRIPT = @INSTALL_SCRIPT@
INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+JAR = @JAR@
+JAVA = @JAVA@
+JAVAC = @JAVAC@
+JAVACFLAGS = @JAVACFLAGS@
+JAVADOC = @JAVADOC@
+JAVAFLAGS = @JAVAFLAGS@
+JAVAPREFIX = @JAVAPREFIX@
+JAVA_JUNIT = @JAVA_JUNIT@
+JAVA_PATH_NAME = @JAVA_PATH_NAME@
+JNIFLAGS = @JNIFLAGS@
+JUNIT = @JUNIT@
LD = @LD@
LDFLAGS = @LDFLAGS@
LEX = @LEX@
@@ -412,11 +440,14 @@ STRIP = @STRIP@
SZIP_HAS_ENCODER = @SZIP_HAS_ENCODER@
SZIP_INFO = @SZIP_INFO@
TBL = @TBL@
+TESTS_JUNIT = @TESTS_JUNIT@
TEST_FORTRAN_NETCDF = @TEST_FORTRAN_NETCDF@
UNAME_INFO = @UNAME_INFO@
USE_COMP_SZIP = @USE_COMP_SZIP@
+UUDECODE = @UUDECODE@
VERSION = @VERSION@
YACC = @YACC@
+_ACJNI_JAVAC = @_ACJNI_JAVAC@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
@@ -507,7 +538,7 @@ INSTALL_FILES = SD_create_sds.c SD_write_to_sds.c SD_write_slab.c \
SD_get_info.c SD_find_sds_by_name.c SD_set_get_dim_info.c \
SD_dimscale_vs_sds.c SD_set_attr.c SD_get_attr.c SD_chunking_example.c
-EXAMPLEDIR = $(prefix)/examples/c
+EXAMPLEDIR = ${DESTDIR}$(prefix)/examples/c
DISTCLEANFILES = *.chklog *.chkexe .deps
@BUILD_SHARED_SZIP_CONDITIONAL_TRUE at LD_LIBRARY_PATH = $(LL_PATH)
diff --git a/mfhdf/examples/SD_chunking_example.c b/mfhdf/examples/SD_chunking_example.c
index 5d1b23c..d6d8504 100644
--- a/mfhdf/examples/SD_chunking_example.c
+++ b/mfhdf/examples/SD_chunking_example.c
@@ -24,21 +24,21 @@ int main()
/*
* Declare chunks data type and initialize some of them.
*/
- int16 chunk1[3][2] = { 1, 1,
- 1, 1,
- 1, 1 };
+ int16 chunk1[3][2] = { {1, 1},
+ {1, 1},
+ {1, 1} };
- int16 chunk2[3][2] = { 2, 2,
- 2, 2,
- 2, 2 };
+ int16 chunk2[3][2] = { {2, 2},
+ {2, 2},
+ {2, 2} };
- int16 chunk3[3][2] = { 3, 3,
- 3, 3,
- 3, 3 };
+ int16 chunk3[3][2] = { {3, 3},
+ {3, 3},
+ {3, 3} };
- int16 chunk6[3][2] = { 6, 6,
- 6, 6,
- 6, 6 };
+ int16 chunk6[3][2] = { {6, 6},
+ {6, 6},
+ {6, 6} };
/********************* End of variable declaration ***********************/
/*
diff --git a/mfhdf/examples/SD_find_sds_by_name.c b/mfhdf/examples/SD_find_sds_by_name.c
index 68733f2..7a735b5 100644
--- a/mfhdf/examples/SD_find_sds_by_name.c
+++ b/mfhdf/examples/SD_find_sds_by_name.c
@@ -14,7 +14,7 @@ int main()
intn status;
int32 start[2], edges[2];
int32 data[Y_LENGTH][X_LENGTH];
- int i, j;
+ int j;
/********************* End of variable declaration ***********************/
diff --git a/mfhdf/examples/SD_get_attr.c b/mfhdf/examples/SD_get_attr.c
index 79cfd8b..72763bf 100644
--- a/mfhdf/examples/SD_get_attr.c
+++ b/mfhdf/examples/SD_get_attr.c
@@ -34,7 +34,7 @@ int main()
status = SDattrinfo (sd_id, attr_index, attr_name, &data_type, &n_values);
/* The data type should be DFNT_CHAR, from SD_set_attr.c */
- if (data_type = DFNT_CHAR)
+ if (data_type == DFNT_CHAR)
{
char *fileattr_data;
@@ -129,6 +129,7 @@ int main()
* Read the dimension attribute data.
*/
status = SDreadattr (dim_id, attr_index, dimattr_data);
+ dimattr_data[n_values-1] = '\0';
/*
* Print out dimension attribute value and free buffer.
diff --git a/mfhdf/examples/SD_mv_sds_to_external.c b/mfhdf/examples/SD_mv_sds_to_external.c
index a496095..54f020f 100644
--- a/mfhdf/examples/SD_mv_sds_to_external.c
+++ b/mfhdf/examples/SD_mv_sds_to_external.c
@@ -9,7 +9,7 @@ int main()
/************************* Variable declaration **************************/
- int32 sd_id, sds_id, sds_index, offset;
+ int32 sd_id, sds_id, sds_index;
intn status;
/********************* End of variable declaration ***********************/
diff --git a/mfhdf/examples/SD_read_from_sds.c b/mfhdf/examples/SD_read_from_sds.c
index 10988c6..539276a 100644
--- a/mfhdf/examples/SD_read_from_sds.c
+++ b/mfhdf/examples/SD_read_from_sds.c
@@ -12,7 +12,7 @@ int main()
intn status;
int32 start[2], edges[2];
int32 data[Y_LENGTH][X_LENGTH];
- int i, j;
+ int j;
/********************* End of variable declaration ***********************/
diff --git a/mfhdf/fortran/CMakeLists.txt b/mfhdf/fortran/CMakeLists.txt
index 39e6d2c..401a46b 100644
--- a/mfhdf/fortran/CMakeLists.txt
+++ b/mfhdf/fortran/CMakeLists.txt
@@ -1,8 +1,7 @@
-cmake_minimum_required (VERSION 2.8.10)
+cmake_minimum_required (VERSION 3.1.0)
PROJECT (HDF4_MFHDF_FORTRAN C CXX Fortran)
INCLUDE_DIRECTORIES (
- ${CMAKE_Fortran_MODULE_DIRECTORY}
${HDF4_HDF_BINARY_DIR}
${HDF4_MFHDF_FORTRAN_BINARY_DIR}
${HDF4_HDFSOURCE_DIR}
@@ -16,8 +15,6 @@ else (CYGWIN)
INCLUDE_DIRECTORIES (/usr/include/rpc)
endif (CYGWIN)
-add_definitions (-DHDF)
-
if (HDF4_ENABLE_NETCDF)
configure_file (${HDF4_MFHDF_FORTRAN_SOURCE_DIR}/ftest.f.in ${HDF4_BINARY_DIR}/ftest.f)
configure_file (${HDF4_MFHDF_FORTRAN_SOURCE_DIR}/jackets.c.in ${HDF4_BINARY_DIR}/jackets.c)
@@ -31,9 +28,9 @@ else (HDF4_ENABLE_NETCDF)
set (HDF4_MFHDF_LIBSRC_CSTUB_FSRCS ${HDF4_MFHDF_FORTRAN_SOURCE_DIR}/mfsdf.c)
endif (HDF4_ENABLE_NETCDF)
-set (HDF4_MFHDF_LIBSRC_CSTUB_FHDRS
- ${HDF4_MFHDFSOURCE_DIR}/hdf2netcdf.h
- ${HDF4_MFHDFSOURCE_DIR}/mfhdf.h
+set (HDF4_MFHDF_LIBSRC_CSTUB_FHDRS
+ ${HDF4_MFHDFSOURCE_DIR}/hdf2netcdf.h
+ ${HDF4_MFHDFSOURCE_DIR}/mfhdf.h
${HDF4_MFHDFSOURCE_DIR}/mfhdfi.h
)
if (HDF4_ENABLE_NETCDF)
@@ -42,47 +39,107 @@ if (HDF4_ENABLE_NETCDF)
else (HDF4_ENABLE_NETCDF)
set (HDF4_MFHDF_LIBSRC_CSTUB_FHDRS ${HDF4_MFHDF_LIBSRC_CSTUB_FHDRS} ${HDF4_BINARY_DIR}/hdf4_netcdf.h)
endif (HDF4_ENABLE_NETCDF)
+set_source_files_properties (${HDF4_MFHDF_LIBSRC_CSTUB_FSRCS} PROPERTIES LANGUAGE C)
set (FORTRAN_SRC_DIR ${CMAKE_CURRENT_SOURCE_DIR})
#-----------------------------------------------------------------------------
# Add Main fortran library
#-----------------------------------------------------------------------------
-add_library (${HDF4_MF_FCSTUB_LIB_TARGET} ${LIB_TYPE} ${HDF4_MFHDF_LIBSRC_CSTUB_FSRCS} ${HDF4_MFHDF_LIBSRC_CSTUB_FHDRS} ${HDF4_HDF_SRC_CHDRS} ${HDF4_HDFSOURCE_DIR}/hproto_fortran.h)
-set_target_properties (${HDF4_MF_FCSTUB_LIB_TARGET} PROPERTIES LINKER_LANGUAGE C)
-if (WIN32)
- add_definitions (-DDOS_FS)
-endif (WIN32)
+add_library (${HDF4_MF_FCSTUB_LIB_TARGET} STATIC ${HDF4_MFHDF_LIBSRC_CSTUB_FSRCS} ${HDF4_MFHDF_LIBSRC_CSTUB_FHDRS} ${HDF4_HDF_SRC_CHDRS} ${HDF4_HDFSOURCE_DIR}/hproto_fortran.h)
if (HDF4_BUILD_XDR_LIB)
- if (WIN32)
- add_definitions (-DNO_SYS_XDR_INC)
- endif (WIN32)
INCLUDE_DIRECTORIES (${HDF4_MFHDF_XDR_DIR})
target_link_libraries (${HDF4_MF_FCSTUB_LIB_TARGET} ${HDF4_MF_XDR_LIB_TARGET})
endif (HDF4_BUILD_XDR_LIB)
-TARGET_C_PROPERTIES (${HDF4_MF_FCSTUB_LIB_TARGET} " " " ")
+TARGET_C_PROPERTIES (${HDF4_MF_FCSTUB_LIB_TARGET} STATIC " " " ")
target_link_libraries (${HDF4_MF_FCSTUB_LIB_TARGET} ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET})
set_global_variable (HDF4_LIBRARIES_TO_EXPORT "${HDF4_LIBRARIES_TO_EXPORT};${HDF4_MF_FCSTUB_LIB_TARGET}")
-H4_SET_LIB_OPTIONS (${HDF4_MF_FCSTUB_LIB_TARGET} ${HDF4_MF_FCSTUB_LIB_NAME} ${LIB_TYPE})
+H4_SET_LIB_OPTIONS (${HDF4_MF_FCSTUB_LIB_TARGET} ${HDF4_MF_FCSTUB_LIB_NAME} STATIC)
+set_target_properties (${HDF4_MF_FCSTUB_LIB_TARGET} PROPERTIES
+ FOLDER libraries/fortran
+ LINKER_LANGUAGE C
+ COMPILE_DEFINITIONS "HDF"
+ INTERFACE_INCLUDE_DIRECTORIES "$<INSTALL_INTERFACE:$<INSTALL_PREFIX>/include>"
+)
+if (WIN32)
+ set_property (TARGET ${HDF4_MF_FCSTUB_LIB_TARGET} APPEND PROPERTY COMPILE_DEFINITIONS "DOS_FS")
+ if (HDF4_BUILD_XDR_LIB)
+ set_property (TARGET ${HDF4_MF_FCSTUB_LIB_TARGET} APPEND PROPERTY COMPILE_DEFINITIONS "NO_SYS_XDR_INC")
+ endif (HDF4_BUILD_XDR_LIB)
+endif (WIN32)
+set (install_targets ${HDF4_MF_FCSTUB_LIB_TARGET})
+
+if (BUILD_SHARED_LIBS)
+ add_library (${HDF4_MF_FCSTUB_LIBSH_TARGET} SHARED ${HDF4_MFHDF_LIBSRC_CSTUB_FSRCS} ${HDF4_MFHDF_LIBSRC_CSTUB_FHDRS} ${HDF4_HDF_SRC_CHDRS} ${HDF4_HDFSOURCE_DIR}/hproto_fortran.h)
+ if (HDF4_BUILD_XDR_LIB)
+ INCLUDE_DIRECTORIES (${HDF4_MFHDF_XDR_DIR})
+ target_link_libraries (${HDF4_MF_FCSTUB_LIBSH_TARGET} ${HDF4_MF_XDR_LIBSH_TARGET})
+ endif (HDF4_BUILD_XDR_LIB)
+ TARGET_C_PROPERTIES (${HDF4_MF_FCSTUB_LIBSH_TARGET} SHARED " " " ")
+ target_link_libraries (${HDF4_MF_FCSTUB_LIBSH_TARGET} ${HDF4_MF_LIBSH_TARGET} ${HDF4_SRC_LIBSH_TARGET})
+ set_global_variable (HDF4_LIBRARIES_TO_EXPORT "${HDF4_LIBRARIES_TO_EXPORT};${HDF4_MF_FCSTUB_LIBSH_TARGET}")
+ H4_SET_LIB_OPTIONS (${HDF4_MF_FCSTUB_LIBSH_TARGET} ${HDF4_MF_FCSTUB_LIB_NAME} SHARED)
+ set_target_properties (${HDF4_MF_FCSTUB_LIBSH_TARGET} PROPERTIES
+ FOLDER libraries/fortran
+ LINKER_LANGUAGE C
+ COMPILE_DEFINITIONS "HDF;H4_BUILT_AS_DYNAMIC_LIB"
+ INTERFACE_INCLUDE_DIRECTORIES "$<INSTALL_INTERFACE:$<INSTALL_PREFIX>/include>"
+ INTERFACE_COMPILE_DEFINITIONS H4_BUILT_AS_DYNAMIC_LIB=1
+ )
+ if (WIN32)
+ set_property (TARGET ${HDF4_MF_FCSTUB_LIBSH_TARGET} APPEND PROPERTY COMPILE_DEFINITIONS "DOS_FS")
+ if (HDF4_BUILD_XDR_LIB)
+ set_property (TARGET ${HDF4_MF_FCSTUB_LIBSH_TARGET} APPEND PROPERTY COMPILE_DEFINITIONS "NO_SYS_XDR_INC")
+ endif (HDF4_BUILD_XDR_LIB)
+ endif (WIN32)
+ set (install_targets ${install_targets} ${HDF4_MF_FCSTUB_LIBSH_TARGET})
+endif (BUILD_SHARED_LIBS)
-set (HDF4_MF_FORTRAN_SRCS
+set (HDF4_MF_FORTRAN_SRCS
${HDF4_MFHDF_FORTRAN_DIR}/mfsdff.f
)
+set_source_files_properties (${HDF4_MF_FORTRAN_SRCS} PROPERTIES LANGUAGE Fortran)
#-----------------------------------------------------------------------------
-add_library (${HDF4_MF_FORTRAN_LIB_TARGET} ${LIB_TYPE} ${HDF4_MF_FORTRAN_SRCS})
+add_library (${HDF4_MF_FORTRAN_LIB_TARGET} STATIC ${HDF4_MF_FORTRAN_SRCS})
set (SHARED_LINK_FLAGS " ")
-if (WIN32)
- if (${LIB_TYPE} MATCHES "SHARED")
- if (MSVC)
- set (SHARED_LINK_FLAGS "/DLL /DEF:${HDF4_MFHDF_FORTRAN_SOURCE_DIR}/mfhdf_fortrandll.def")
- endif (MSVC)
- endif (${LIB_TYPE} MATCHES "SHARED")
-endif (WIN32)
-TARGET_FORTRAN_PROPERTIES (${HDF4_MF_FORTRAN_LIB_TARGET} " " ${SHARED_LINK_FLAGS})
-set_target_properties (${HDF4_MF_FORTRAN_LIB_TARGET} PROPERTIES LINKER_LANGUAGE Fortran)
+TARGET_FORTRAN_PROPERTIES (${HDF4_MF_FORTRAN_LIB_TARGET} STATIC " " ${SHARED_LINK_FLAGS})
target_link_libraries (${HDF4_MF_FORTRAN_LIB_TARGET} ${HDF4_MF_FCSTUB_LIB_TARGET} ${LINK_LIBS})
set_global_variable (HDF4_LIBRARIES_TO_EXPORT "${HDF4_LIBRARIES_TO_EXPORT};${HDF4_MF_FORTRAN_LIB_TARGET}")
-H4_SET_LIB_OPTIONS (${HDF4_MF_FORTRAN_LIB_TARGET} ${HDF4_MF_FORTRAN_LIB_NAME} ${LIB_TYPE})
+H4_SET_LIB_OPTIONS (${HDF4_MF_FORTRAN_LIB_TARGET} ${HDF4_MF_FORTRAN_LIB_NAME} STATIC)
+set_target_properties (${HDF4_MF_FORTRAN_LIB_TARGET} PROPERTIES
+ FOLDER libraries/fortran
+ LINKER_LANGUAGE Fortran
+ Fortran_MODULE_DIRECTORY ${CMAKE_Fortran_MODULE_DIRECTORY}
+ INTERFACE_INCLUDE_DIRECTORIES "$<INSTALL_INTERFACE:$<INSTALL_PREFIX>/include>"
+)
+if (WIN32)
+ set_property (TARGET ${HDF4_MF_FORTRAN_LIB_TARGET} APPEND PROPERTY COMPILE_DEFINITIONS "DOS_FS")
+endif (WIN32)
+set (install_targets ${install_targets} ${HDF4_MF_FORTRAN_LIB_TARGET})
+
+if (BUILD_SHARED_LIBS)
+ add_library (${HDF4_MF_FORTRAN_LIBSH_TARGET} SHARED ${HDF4_MF_FORTRAN_SRCS})
+ set (SHARED_LINK_FLAGS " ")
+ if (WIN32 AND MSVC)
+ set (SHARED_LINK_FLAGS "/DLL /DEF:${HDF4_MFHDF_FORTRAN_SOURCE_DIR}/mfhdf_fortrandll.def")
+ endif (WIN32 AND MSVC)
+ TARGET_FORTRAN_PROPERTIES (${HDF4_MF_FORTRAN_LIBSH_TARGET} SHARED " " ${SHARED_LINK_FLAGS})
+ target_link_libraries (${HDF4_MF_FORTRAN_LIBSH_TARGET} ${HDF4_MF_FCSTUB_LIBSH_TARGET} ${LINK_SHARED_LIBS})
+ set_global_variable (HDF4_LIBRARIES_TO_EXPORT "${HDF4_LIBRARIES_TO_EXPORT};${HDF4_MF_FORTRAN_LIBSH_TARGET}")
+ H4_SET_LIB_OPTIONS (${HDF4_MF_FORTRAN_LIBSH_TARGET} ${HDF4_MF_FORTRAN_LIB_NAME} SHARED)
+ set_target_properties (${HDF4_MF_FORTRAN_LIBSH_TARGET} PROPERTIES
+ FOLDER libraries/fortran
+ LINKER_LANGUAGE Fortran
+ Fortran_MODULE_DIRECTORY ${CMAKE_Fortran_MODULE_DIRECTORY}
+ COMPILE_DEFINITIONS "H4_BUILT_AS_DYNAMIC_LIB"
+ INTERFACE_INCLUDE_DIRECTORIES "$<INSTALL_INTERFACE:$<INSTALL_PREFIX>/include>"
+ INTERFACE_COMPILE_DEFINITIONS H4_BUILT_AS_DYNAMIC_LIB=1
+ )
+ if (WIN32)
+ set_property (TARGET ${HDF4_MF_FORTRAN_LIBSH_TARGET} APPEND PROPERTY COMPILE_DEFINITIONS "DOS_FS")
+ endif (WIN32)
+ set (install_targets ${install_targets} ${HDF4_MF_FORTRAN_LIBSH_TARGET})
+endif (BUILD_SHARED_LIBS)
if (BUILD_TESTING)
include (CMakeTests.cmake)
@@ -104,17 +161,17 @@ install (
# Add library to CMake Install : Installs lib and cmake config info
#-----------------------------------------------------------------------------
if (BUILD_SHARED_LIBS)
- INSTALL_TARGET_PDB (${HDF4_MF_FCSTUB_LIB_TARGET} ${HDF4_INSTALL_LIB_DIR} fortlibraries)
- INSTALL_TARGET_PDB (${HDF4_MF_FORTRAN_LIB_TARGET} ${HDF4_INSTALL_LIB_DIR} fortlibraries)
+ INSTALL_TARGET_PDB (${HDF4_MF_FCSTUB_LIBSH_TARGET} ${HDF4_INSTALL_BIN_DIR} fortlibraries)
+ #INSTALL_TARGET_PDB (${HDF4_MF_FORTRAN_LIBSH_TARGET} ${HDF4_INSTALL_BIN_DIR} fortlibraries)
endif (BUILD_SHARED_LIBS)
-
+
install (
- TARGETS
- ${HDF4_MF_FCSTUB_LIB_TARGET}
- ${HDF4_MF_FORTRAN_LIB_TARGET}
- EXPORT
+ TARGETS
+ ${install_targets}
+ EXPORT
${HDF4_EXPORTED_TARGETS}
- LIBRARY DESTINATION ${HDF4_INSTALL_LIB_DIR} COMPONENT fortlibraries
+ LIBRARY DESTINATION ${HDF4_INSTALL_LIB_DIR} COMPONENT fortlibraries
ARCHIVE DESTINATION ${HDF4_INSTALL_LIB_DIR} COMPONENT fortlibraries
RUNTIME DESTINATION ${HDF4_INSTALL_BIN_DIR} COMPONENT fortlibraries
+ FRAMEWORK DESTINATION ${HDF4_INSTALL_FWRK_DIR} COMPONENT fortlibraries
)
diff --git a/mfhdf/fortran/CMakeTests.cmake b/mfhdf/fortran/CMakeTests.cmake
index a0b6615..be7f0ca 100644
--- a/mfhdf/fortran/CMakeTests.cmake
+++ b/mfhdf/fortran/CMakeTests.cmake
@@ -5,16 +5,16 @@
##############################################################################
##############################################################################
- FILE (MAKE_DIRECTORY ${PROJECT_BINARY_DIR}/testdir)
+ file (MAKE_DIRECTORY ${PROJECT_BINARY_DIR}/testdir)
#-----------------------------------------------------------------------------
# test programs
#-----------------------------------------------------------------------------
if (HDF4_ENABLE_NETCDF)
#-- Adding test for ftest
- ADD_EXECUTABLE (ftest ${HDF4_BINARY_DIR}/ftest.f ${HDF4_HDF_TESTSOURCE_DIR}/forsupff.f ${HDF4_BINARY_DIR}/netcdf.inc)
- TARGET_NAMING (ftest ${LIB_TYPE})
- TARGET_FORTRAN_PROPERTIES (ftest " " " ")
+ add_executable (ftest ${HDF4_BINARY_DIR}/ftest.f ${HDF4_HDF_TESTSOURCE_DIR}/forsupff.f ${HDF4_BINARY_DIR}/netcdf.inc)
+ TARGET_NAMING (ftest STATIC)
+ TARGET_FORTRAN_PROPERTIES (ftest STATIC " " " ")
set_target_properties (ftest PROPERTIES LINKER_LANGUAGE Fortran)
if (HDF4_BUILD_XDR_LIB)
target_link_libraries (ftest ${HDF4_MF_FORTRAN_LIB_TARGET} ${HDF4_HDF_TEST_FCSTUB_LIB_TARGET} ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${HDF4_MF_XDR_LIB_TARGET} ${LINK_LIBS})
@@ -42,9 +42,9 @@
endif (HDF4_ENABLE_NETCDF)
#-- Adding test for f_hdftest
- ADD_EXECUTABLE (f_hdftest hdftest.f)
- TARGET_NAMING (f_hdftest ${LIB_TYPE})
- TARGET_FORTRAN_PROPERTIES (f_hdftest " " " ")
+ add_executable (f_hdftest hdftest.f)
+ TARGET_NAMING (f_hdftest STATIC)
+ TARGET_FORTRAN_PROPERTIES (f_hdftest STATIC " " " ")
set_target_properties (f_hdftest PROPERTIES LINKER_LANGUAGE Fortran)
if (HDF4_BUILD_XDR_LIB)
target_link_libraries (f_hdftest ${HDF4_MF_FORTRAN_LIB_TARGET} ${HDF4_SRC_FORTRAN_LIB_TARGET} ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${HDF4_MF_XDR_LIB_TARGET} ${LINK_LIBS})
@@ -53,9 +53,9 @@
endif (HDF4_BUILD_XDR_LIB)
#-- Adding test for f_hdftest1
- ADD_EXECUTABLE (f_hdftest1 hdftest1.f)
- TARGET_NAMING (f_hdftest1 ${LIB_TYPE})
- TARGET_FORTRAN_PROPERTIES (f_hdftest1 " " " ")
+ add_executable (f_hdftest1 hdftest1.f)
+ TARGET_NAMING (f_hdftest1 STATIC)
+ TARGET_FORTRAN_PROPERTIES (f_hdftest1 STATIC " " " ")
set_target_properties (f_hdftest1 PROPERTIES LINKER_LANGUAGE Fortran)
if (HDF4_BUILD_XDR_LIB)
target_link_libraries (f_hdftest1 ${HDF4_MF_FORTRAN_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${HDF4_MF_LIB_TARGET} ${HDF4_MF_XDR_LIB_TARGET} ${LINK_LIBS})
@@ -64,9 +64,9 @@
endif (HDF4_BUILD_XDR_LIB)
#-- Adding test for f_tszip
- ADD_EXECUTABLE (f_tszip tszip.f)
- TARGET_NAMING (f_tszip ${LIB_TYPE})
- TARGET_FORTRAN_PROPERTIES (f_tszip " " " ")
+ add_executable (f_tszip tszip.f)
+ TARGET_NAMING (f_tszip STATIC)
+ TARGET_FORTRAN_PROPERTIES (f_tszip STATIC " " " ")
set_target_properties (f_tszip PROPERTIES LINKER_LANGUAGE Fortran)
if (HDF4_BUILD_XDR_LIB)
target_link_libraries (f_tszip ${HDF4_MF_FORTRAN_LIB_TARGET} ${HDF4_SRC_FORTRAN_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${HDF4_MF_LIB_TARGET} ${HDF4_MF_XDR_LIB_TARGET} ${LINK_LIBS})
@@ -148,15 +148,15 @@
add_test (NAME f_hdftest COMMAND $<TARGET_FILE:f_hdftest>)
set (passRegex "Total errors : [ ]+0")
- SET_PROPERTY (TEST f_hdftest PROPERTY PASS_REGULAR_EXPRESSION "${passRegex}")
+ set_property (TEST f_hdftest PROPERTY PASS_REGULAR_EXPRESSION "${passRegex}")
set_tests_properties (f_hdftest PROPERTIES DEPENDS MFHDF_FORTRAN-clearall-objects LABELS ${PROJECT_NAME})
add_test (NAME f_hdftest1 COMMAND $<TARGET_FILE:f_hdftest1>)
set (failRegex "failed" "errors")
- SET_PROPERTY (TEST f_hdftest1 PROPERTY FAIL_REGULAR_EXPRESSION "${failRegex}")
+ set_property (TEST f_hdftest1 PROPERTY FAIL_REGULAR_EXPRESSION "${failRegex}")
set_tests_properties (f_hdftest1 PROPERTIES DEPENDS f_hdftest LABELS ${PROJECT_NAME})
add_test (NAME f_tszip COMMAND $<TARGET_FILE:f_tszip>)
set (failRegex "failed" "errors")
- SET_PROPERTY (TEST f_tszip PROPERTY FAIL_REGULAR_EXPRESSION "${failRegex}")
+ set_property (TEST f_tszip PROPERTY FAIL_REGULAR_EXPRESSION "${failRegex}")
set_tests_properties (f_tszip PROPERTIES DEPENDS f_hdftest1 LABELS ${PROJECT_NAME})
diff --git a/mfhdf/fortran/Makefile.in b/mfhdf/fortran/Makefile.in
index fe1d7fd..aab7160 100644
--- a/mfhdf/fortran/Makefile.in
+++ b/mfhdf/fortran/Makefile.in
@@ -96,7 +96,20 @@ TESTS = $(TEST_SCRIPT)
subdir = mfhdf/fortran
SUBDIRS =
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
-am__aclocal_m4_deps = $(top_srcdir)/configure.ac
+am__aclocal_m4_deps = $(top_srcdir)/m4/ax_check_class.m4 \
+ $(top_srcdir)/m4/ax_check_classpath.m4 \
+ $(top_srcdir)/m4/ax_check_java_home.m4 \
+ $(top_srcdir)/m4/ax_check_junit.m4 \
+ $(top_srcdir)/m4/ax_java_options.m4 \
+ $(top_srcdir)/m4/ax_jni_include_dir.m4 \
+ $(top_srcdir)/m4/ax_prog_jar.m4 \
+ $(top_srcdir)/m4/ax_prog_java.m4 \
+ $(top_srcdir)/m4/ax_prog_java_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javac.m4 \
+ $(top_srcdir)/m4/ax_prog_javac_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javadoc.m4 \
+ $(top_srcdir)/m4/ax_try_compile_java.m4 \
+ $(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
mkinstalldirs = $(SHELL) $(top_srcdir)/bin/mkinstalldirs
@@ -458,12 +471,27 @@ FFLAGS = @FFLAGS@
FGREP = @FGREP@
FLIBS = @FLIBS@
GREP = @GREP@
+H4_CLASSPATH = @H4_CLASSPATH@
+H4_JAVACFLAGS = @H4_JAVACFLAGS@
+H4_JAVAFLAGS = @H4_JAVAFLAGS@
H4_VERSION = @H4_VERSION@
+HDF_JAVA = @HDF_JAVA@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
INSTALL_PROGRAM = @INSTALL_PROGRAM@
INSTALL_SCRIPT = @INSTALL_SCRIPT@
INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+JAR = @JAR@
+JAVA = @JAVA@
+JAVAC = @JAVAC@
+JAVACFLAGS = @JAVACFLAGS@
+JAVADOC = @JAVADOC@
+JAVAFLAGS = @JAVAFLAGS@
+JAVAPREFIX = @JAVAPREFIX@
+JAVA_JUNIT = @JAVA_JUNIT@
+JAVA_PATH_NAME = @JAVA_PATH_NAME@
+JNIFLAGS = @JNIFLAGS@
+JUNIT = @JUNIT@
LD = @LD@
LDFLAGS = @LDFLAGS@
LEX = @LEX@
@@ -506,11 +534,14 @@ STRIP = @STRIP@
SZIP_HAS_ENCODER = @SZIP_HAS_ENCODER@
SZIP_INFO = @SZIP_INFO@
TBL = @TBL@
+TESTS_JUNIT = @TESTS_JUNIT@
TEST_FORTRAN_NETCDF = @TEST_FORTRAN_NETCDF@
UNAME_INFO = @UNAME_INFO@
USE_COMP_SZIP = @USE_COMP_SZIP@
+UUDECODE = @UUDECODE@
VERSION = @VERSION@
YACC = @YACC@
+_ACJNI_JAVAC = @_ACJNI_JAVAC@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/mfhdf/fortran/examples/CMakeLists.txt b/mfhdf/fortran/examples/CMakeLists.txt
index 4cb2dad..dda3651 100644
--- a/mfhdf/fortran/examples/CMakeLists.txt
+++ b/mfhdf/fortran/examples/CMakeLists.txt
@@ -1,20 +1,18 @@
-cmake_minimum_required (VERSION 2.8.10)
+cmake_minimum_required (VERSION 3.1.0)
# --------------------------------------------------------------------
# Notes: When creating examples they should be prefixed
# with "f_ex_". This allows for easier filtering of the examples.
# --------------------------------------------------------------------
-PROJECT (HDF4_MFHDF_FORTRAN_EXAMPLES C CXX Fortran)
+project (HDF4_MFHDF_FORTRAN_EXAMPLES C CXX Fortran)
#-----------------------------------------------------------------------------
# Setup include Directories
#-----------------------------------------------------------------------------
INCLUDE_DIRECTORIES (
- ${CMAKE_Fortran_MODULE_DIRECTORY}
${HDF4_HDF_BINARY_DIR}
${HDF4_HDFSOURCE_DIR}
)
LINK_DIRECTORIES (
- ${CMAKE_Fortran_MODULE_DIRECTORY}
${HDF4_MFHDF_BINARY_DIR}
${HDF4_HDF_BINARY_DIR}
${CMAKE_LIBRARY_OUTPUT_DIRECTORY}
@@ -48,10 +46,14 @@ set (skip_examples
)
foreach (example ${examples})
- ADD_EXECUTABLE (f_exmf_${example} ${HDF4_MFHDF_FORTRAN_EXAMPLES_SOURCE_DIR}/${example}.f)
- TARGET_NAMING (f_exmf_${example} ${LIB_TYPE})
- TARGET_FORTRAN_PROPERTIES (f_exmf_${example} " " " ")
- set_target_properties (f_exmf_${example} PROPERTIES LINKER_LANGUAGE Fortran)
+ add_executable (f_exmf_${example} ${HDF4_MFHDF_FORTRAN_EXAMPLES_SOURCE_DIR}/${example}.f)
+ TARGET_NAMING (f_exmf_${example} STATIC)
+ TARGET_FORTRAN_PROPERTIES (f_exmf_${example} STATIC " " " ")
+ target_include_directories (f_exmf_${example} PRIVATE ${CMAKE_Fortran_MODULE_DIRECTORY})
+ set_target_properties (f_exmf_${example} PROPERTIES
+ LINKER_LANGUAGE Fortran
+ Fortran_MODULE_DIRECTORY ${CMAKE_Fortran_MODULE_DIRECTORY}
+ )
if (HDF4_BUILD_XDR_LIB)
target_link_libraries (f_exmf_${example} ${HDF4_MF_FORTRAN_LIB_TARGET} ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS} ${HDF4_MF_XDR_LIB_TARGET})
else (HDF4_BUILD_XDR_LIB)
@@ -60,10 +62,14 @@ foreach (example ${examples})
endforeach (example ${examples})
foreach (example ${skip_examples})
- ADD_EXECUTABLE (f_exmf_${example} ${HDF4_MFHDF_FORTRAN_EXAMPLES_SOURCE_DIR}/${example}.f)
- TARGET_NAMING (f_exmf_${example} ${LIB_TYPE})
- TARGET_FORTRAN_PROPERTIES (f_exmf_${example} " " " ")
- set_target_properties (f_exmf_${example} PROPERTIES LINKER_LANGUAGE Fortran)
+ add_executable (f_exmf_${example} ${HDF4_MFHDF_FORTRAN_EXAMPLES_SOURCE_DIR}/${example}.f)
+ TARGET_NAMING (f_exmf_${example} STATIC)
+ TARGET_FORTRAN_PROPERTIES (f_exmf_${example} STATIC " " " ")
+ target_include_directories (f_exmf_${example} PRIVATE ${CMAKE_Fortran_MODULE_DIRECTORY})
+ set_target_properties (f_exmf_${example} PROPERTIES
+ LINKER_LANGUAGE Fortran
+ Fortran_MODULE_DIRECTORY ${CMAKE_Fortran_MODULE_DIRECTORY}
+ )
if (HDF4_BUILD_XDR_LIB)
target_link_libraries (f_exmf_${example} ${HDF4_MF_FORTRAN_LIB_TARGET} ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS} ${HDF4_MF_XDR_LIB_TARGET})
else (HDF4_BUILD_XDR_LIB)
diff --git a/mfhdf/fortran/examples/Makefile.am b/mfhdf/fortran/examples/Makefile.am
index ad5f3c4..715ff15 100644
--- a/mfhdf/fortran/examples/Makefile.am
+++ b/mfhdf/fortran/examples/Makefile.am
@@ -24,7 +24,7 @@ INSTALL_FILES=SD_create_sds.f SD_write_to_sds.f SD_write_slab.f SD_alter_sds_val
SD_set_attr.f SD_get_attr.f SD_compress_sds.f SD_chunking_example.f
# Where to install example files
-EXAMPLEDIR=$(prefix)/examples/fortran
+EXAMPLEDIR=${DESTDIR}$(prefix)/examples/fortran
# How to build Fortran programs using h4fc
$(EXTRA_PROG): $(H4FC)
diff --git a/mfhdf/fortran/examples/Makefile.in b/mfhdf/fortran/examples/Makefile.in
index 47e1a35..5d9b185 100644
--- a/mfhdf/fortran/examples/Makefile.in
+++ b/mfhdf/fortran/examples/Makefile.in
@@ -92,7 +92,20 @@ DIST_COMMON = $(top_srcdir)/config/commence.am \
TESTS = $(TEST_PROG)
subdir = mfhdf/fortran/examples
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
-am__aclocal_m4_deps = $(top_srcdir)/configure.ac
+am__aclocal_m4_deps = $(top_srcdir)/m4/ax_check_class.m4 \
+ $(top_srcdir)/m4/ax_check_classpath.m4 \
+ $(top_srcdir)/m4/ax_check_java_home.m4 \
+ $(top_srcdir)/m4/ax_check_junit.m4 \
+ $(top_srcdir)/m4/ax_java_options.m4 \
+ $(top_srcdir)/m4/ax_jni_include_dir.m4 \
+ $(top_srcdir)/m4/ax_prog_jar.m4 \
+ $(top_srcdir)/m4/ax_prog_java.m4 \
+ $(top_srcdir)/m4/ax_prog_java_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javac.m4 \
+ $(top_srcdir)/m4/ax_prog_javac_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javadoc.m4 \
+ $(top_srcdir)/m4/ax_try_compile_java.m4 \
+ $(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
mkinstalldirs = $(SHELL) $(top_srcdir)/bin/mkinstalldirs
@@ -364,12 +377,27 @@ FFLAGS = @FFLAGS@
FGREP = @FGREP@
FLIBS = @FLIBS@
GREP = @GREP@
+H4_CLASSPATH = @H4_CLASSPATH@
+H4_JAVACFLAGS = @H4_JAVACFLAGS@
+H4_JAVAFLAGS = @H4_JAVAFLAGS@
H4_VERSION = @H4_VERSION@
+HDF_JAVA = @HDF_JAVA@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
INSTALL_PROGRAM = @INSTALL_PROGRAM@
INSTALL_SCRIPT = @INSTALL_SCRIPT@
INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+JAR = @JAR@
+JAVA = @JAVA@
+JAVAC = @JAVAC@
+JAVACFLAGS = @JAVACFLAGS@
+JAVADOC = @JAVADOC@
+JAVAFLAGS = @JAVAFLAGS@
+JAVAPREFIX = @JAVAPREFIX@
+JAVA_JUNIT = @JAVA_JUNIT@
+JAVA_PATH_NAME = @JAVA_PATH_NAME@
+JNIFLAGS = @JNIFLAGS@
+JUNIT = @JUNIT@
LD = @LD@
LDFLAGS = @LDFLAGS@
LEX = @LEX@
@@ -412,11 +440,14 @@ STRIP = @STRIP@
SZIP_HAS_ENCODER = @SZIP_HAS_ENCODER@
SZIP_INFO = @SZIP_INFO@
TBL = @TBL@
+TESTS_JUNIT = @TESTS_JUNIT@
TEST_FORTRAN_NETCDF = @TEST_FORTRAN_NETCDF@
UNAME_INFO = @UNAME_INFO@
USE_COMP_SZIP = @USE_COMP_SZIP@
+UUDECODE = @UUDECODE@
VERSION = @VERSION@
YACC = @YACC@
+_ACJNI_JAVAC = @_ACJNI_JAVAC@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
@@ -511,7 +542,7 @@ INSTALL_FILES = SD_create_sds.f SD_write_to_sds.f SD_write_slab.f SD_alter_sds_v
# Where to install example files
-EXAMPLEDIR = $(prefix)/examples/fortran
+EXAMPLEDIR = ${DESTDIR}$(prefix)/examples/fortran
DISTCLEANFILES = *.chkexe *.chklog
@BUILD_SHARED_SZIP_CONDITIONAL_TRUE@LD_LIBRARY_PATH = $(LL_PATH)
diff --git a/mfhdf/fortran/mfsdf.c b/mfhdf/fortran/mfsdf.c
index 8afe570..f9e11fe 100644
--- a/mfhdf/fortran/mfsdf.c
+++ b/mfhdf/fortran/mfsdf.c
@@ -10,11 +10,8 @@
* http://hdfgroup.org/products/hdf4/doc/Copyright.html. If you do not have *
* access to either file, you may request a copy from help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6036 $";
-#endif
-/* $Id: mfsdf.c 6036 2014-01-20 17:28:01Z acheng $ */
+/* $Id: mfsdf.c 6357 2016-05-13 05:00:06Z bmribler $ */
/*
diff --git a/mfhdf/hdfimport/CMakeLists.txt b/mfhdf/hdfimport/CMakeLists.txt
index 7d26c32..e787e34 100644
--- a/mfhdf/hdfimport/CMakeLists.txt
+++ b/mfhdf/hdfimport/CMakeLists.txt
@@ -1,5 +1,5 @@
-cmake_minimum_required (VERSION 2.8.10)
-PROJECT (HDF4_MFHDF_HDFIMPORT)
+cmake_minimum_required (VERSION 3.1.0)
+project (HDF4_MFHDF_HDFIMPORT)
INCLUDE_DIRECTORIES (${HDF4_HDFSOURCE_DIR})
INCLUDE_DIRECTORIES (${HDF4_MFHDFSOURCE_DIR})
@@ -24,15 +24,15 @@ endif (HDF4_BUILD_XDR_LIB)
set (hdfimport_SRCS
${HDF4_MFHDF_HDFIMPORT_SOURCE_DIR}/hdfimport.c
)
-
-ADD_EXECUTABLE (hdfimport ${hdfimport_SRCS})
-TARGET_C_PROPERTIES (hdfimport " " " ")
+
+add_executable (hdfimport ${hdfimport_SRCS})
+TARGET_C_PROPERTIES (hdfimport STATIC " " " ")
if (HDF4_BUILD_XDR_LIB)
target_link_libraries (hdfimport ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS} ${HDF4_MF_XDR_LIB_TARGET})
else (HDF4_BUILD_XDR_LIB)
target_link_libraries (hdfimport ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
endif (HDF4_BUILD_XDR_LIB)
-TARGET_NAMING (hdfimport ${LIB_TYPE})
+TARGET_NAMING (hdfimport STATIC)
if (BUILD_TESTING)
include (CMakeTests.cmake)
@@ -53,8 +53,5 @@ INSTALL_PROGRAM_PDB (hdfimport ${HDF4_INSTALL_TOOLS_BIN_DIR} toolsapplications)
INSTALL (
TARGETS
hdfimport
- RUNTIME DESTINATION
- ${HDF4_INSTALL_TOOLS_BIN_DIR}
- COMPONENT
- toolsapplications
+ RUNTIME DESTINATION ${HDF4_INSTALL_TOOLS_BIN_DIR} COMPONENT toolsapplications
)
diff --git a/mfhdf/hdfimport/CMakeTests.cmake b/mfhdf/hdfimport/CMakeTests.cmake
index 52de670..bef4494 100644
--- a/mfhdf/hdfimport/CMakeTests.cmake
+++ b/mfhdf/hdfimport/CMakeTests.cmake
@@ -4,7 +4,7 @@
### T E S T I N G ###
##############################################################################
##############################################################################
-
+
#-- Copy all the dat files from the test directory into the source directory
set (HDF4_REFERENCE_TEST_FILES
hdfimport.input1
@@ -14,100 +14,100 @@
SDSfloat2.hdf
SDSfloat3.hdf
)
-
+
foreach (h4_file ${HDF4_REFERENCE_TEST_FILES})
set (dest "${PROJECT_BINARY_DIR}/${h4_file}")
#message (STATUS " Copying ${HDF4_MFHDF_HDFIMPORT_SOURCE_DIR}/${h4_file} to ${PROJECT_BINARY_DIR}/")
- ADD_CUSTOM_COMMAND (
- TARGET hdfimport
+ add_custom_command (
+ TARGET hdfimport
POST_BUILD
COMMAND ${CMAKE_COMMAND}
ARGS -E copy_if_different ${HDF4_MFHDF_HDFIMPORT_SOURCE_DIR}/${h4_file} ${dest}
- )
+ )
endforeach (h4_file ${HDF4_REFERENCE_TEST_FILES})
-
- if (WIN32)
- ADD_CUSTOM_COMMAND (
+
+ if (WIN32 AND MSVC_VERSION LESS 1900)
+ add_custom_command (
TARGET hdfimport
POST_BUILD
COMMAND ${CMAKE_COMMAND}
ARGS -E copy_if_different ${HDF4_MFHDF_HDFIMPORT_SOURCE_DIR}/hdfimport-w.out2 ${PROJECT_BINARY_DIR}/hdfimport.out2
)
- else (WIN32)
- ADD_CUSTOM_COMMAND (
+ else (WIN32 AND MSVC_VERSION LESS 1900)
+ add_custom_command (
TARGET hdfimport
POST_BUILD
COMMAND ${CMAKE_COMMAND}
ARGS -E copy_if_different ${HDF4_MFHDF_HDFIMPORT_SOURCE_DIR}/hdfimport.out2 ${PROJECT_BINARY_DIR}/hdfimport.out2
)
- endif (WIN32)
-
+ endif (WIN32 AND MSVC_VERSION LESS 1900)
+
#-- Copy all the hdfls tst files from the test directory into the source directory
set (HDF4_LS_TEST_FILES
- ctxtr2.tst
- ctxtr3.tst
- cb32i2.tst
- cb32i3.tst
- cb16i2.tst
- cb16i3.tst
- cb32r2.tst
- cb32r3.tst
- cb64r2.tst
- cb64r3.tst
- cb64r2-n.tst
- cb64r3-n.tst
+ ctxtr2.tst
+ ctxtr3.tst
+ cb32i2.tst
+ cb32i3.tst
+ cb16i2.tst
+ cb16i3.tst
+ cb32r2.tst
+ cb32r3.tst
+ cb64r2.tst
+ cb64r3.tst
+ cb64r2-n.tst
+ cb64r3-n.tst
ctxtr2_ris.tst
cb64r2_ris.tst
SDSfloat2.tst
SDSfloat3.tst
)
-
+
foreach (ls_file ${HDF4_LS_TEST_FILES})
set (dest "${PROJECT_BINARY_DIR}/${ls_file}")
#message (STATUS " Copying ${HDF4_MFHDF_HDFIMPORT_SOURCE_DIR}/${ls_file} to ${PROJECT_BINARY_DIR}/")
ADD_CUSTOM_COMMAND (
- TARGET hdfimport
+ TARGET hdfimport
POST_BUILD
COMMAND ${CMAKE_COMMAND}
ARGS -E copy_if_different ${HDF4_MFHDF_HDFIMPORT_SOURCE_DIR}/testfiles/${ls_file} ${dest}
- )
+ )
endforeach (ls_file ${HDF4_LS_TEST_FILES})
#-- hdfimporttest
set (hdfimporttest_SRCS
${HDF4_MFHDF_HDFIMPORT_SOURCE_DIR}/hdfimporttest.c
)
-
- ADD_EXECUTABLE (hdfimporttest ${hdfimporttest_SRCS})
- TARGET_C_PROPERTIES (hdfimporttest " " " ")
+
+ add_executable (hdfimporttest ${hdfimporttest_SRCS})
+ TARGET_C_PROPERTIES (hdfimporttest STATIC " " " ")
if (HDF4_BUILD_XDR_LIB)
target_link_libraries (hdfimporttest ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS} ${HDF4_MF_XDR_LIB_TARGET})
else (HDF4_BUILD_XDR_LIB)
target_link_libraries (hdfimporttest ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
endif (HDF4_BUILD_XDR_LIB)
- TARGET_NAMING (hdfimporttest ${LIB_TYPE})
+ TARGET_NAMING (hdfimporttest STATIC)
#-- gen_sds_floats
set (gen_sds_floats_SRCS
${HDF4_MFHDF_HDFIMPORT_SOURCE_DIR}/gen_sds_floats.c
)
-
- ADD_EXECUTABLE (gen_sds_floats ${gen_sds_floats_SRCS})
- TARGET_C_PROPERTIES (gen_sds_floats " " " ")
+
+ add_executable (gen_sds_floats ${gen_sds_floats_SRCS})
+ TARGET_C_PROPERTIES (gen_sds_floats STATIC " " " ")
if (HDF4_BUILD_XDR_LIB)
target_link_libraries (gen_sds_floats ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS} ${HDF4_MF_XDR_LIB_TARGET})
else (HDF4_BUILD_XDR_LIB)
target_link_libraries (gen_sds_floats ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
endif (HDF4_BUILD_XDR_LIB)
- TARGET_NAMING (gen_sds_floats ${LIB_TYPE})
-
+ TARGET_NAMING (gen_sds_floats STATIC)
+
##############################################################################
##############################################################################
### T H E T E S T S M A C R O S ###
##############################################################################
##############################################################################
- MACRO (ADD_H4_TEST resultfile resultcode testtfile testtype)
+ macro (ADD_H4_TEST resultfile resultcode testtfile testtype)
if ( NOT ${testtype} STREQUAL "")
if (${testtype} STREQUAL "N")
add_test (NAME HIMPORT-${testtfile} COMMAND $<TARGET_FILE:hdfimport> ${resultfile} -n -o ${testtfile}.hdf)
@@ -142,9 +142,9 @@
endif (HDF4_ENABLE_USING_MEMCHECKER)
set_tests_properties (HIMPORTLS-${testtfile} PROPERTIES DEPENDS HIMPORT-${testtfile} LABELS ${PROJECT_NAME})
set (last_test "HIMPORTLS-${testtfile}")
- ENDMACRO (ADD_H4_TEST)
+ endmacro (ADD_H4_TEST)
- MACRO (ADD_H4_TEST_OUT resultfile resultcode)
+ macro (ADD_H4_TEST_OUT resultfile resultcode)
add_test (NAME HIMPORT-OUT-${resultfile} COMMAND $<TARGET_FILE:hdfimport> ${resultfile}.hdf -o ${resultfile}.out)
if (NOT "${last_test}" STREQUAL "")
set_tests_properties (HIMPORT-OUT-${resultfile} PROPERTIES DEPENDS ${last_test} LABELS ${PROJECT_NAME})
@@ -170,9 +170,9 @@
endif (HDF4_ENABLE_USING_MEMCHECKER)
set_tests_properties (HIMPORTLS-OUT-${resultfile} PROPERTIES DEPENDS HIMPORT-OUT-${resultfile} LABELS ${PROJECT_NAME})
set (last_test "HIMPORTLS-OUT-${resultfile}")
- ENDMACRO (ADD_H4_TEST_OUT)
+ endmacro (ADD_H4_TEST_OUT)
- MACRO (ADD_H4_TEST_ED testfile resultfile resultcode)
+ macro (ADD_H4_TEST_ED testfile resultfile resultcode)
if (HDF4_ENABLE_USING_MEMCHECKER)
add_test (NAME HIMPORT-EDIT COMMAND $<TARGET_FILE:hdfed> -batch)
else (HDF4_ENABLE_USING_MEMCHECKER)
@@ -195,7 +195,7 @@
set_tests_properties (HIMPORT-EDIT PROPERTIES LABELS ${PROJECT_NAME})
endif (NOT "${last_test}" STREQUAL "")
set (last_test "HIMPORT-EDIT")
- ENDMACRO (ADD_H4_TEST_ED)
+ endmacro (ADD_H4_TEST_ED)
##############################################################################
##############################################################################
@@ -207,63 +207,63 @@
add_test (
NAME hdfimport-clear-refs
COMMAND ${CMAKE_COMMAND}
- -E remove
- ctxtr2
- ctxtr3
- cb32i2
- cb32i3
- cb16i2
- cb16i3
- cb32r2
- cb32r3
- cb64r2
- cb64r3
+ -E remove
+ ctxtr2
+ ctxtr3
+ cb32i2
+ cb32i3
+ cb16i2
+ cb16i3
+ cb32r2
+ cb32r3
+ cb64r2
+ cb64r3
cb64r2-n
- cb64r3-n
+ cb64r3-n
ctxtr2_ris
cb64r2_ris
- ctxtr2.hdf
- ctxtr3.hdf
- cb32i2.hdf
- cb32i3.hdf
- cb16i2.hdf
- cb16i3.hdf
- cb32r2.hdf
- cb32r3.hdf
- cb64r2.hdf
- cb64r3.hdf
- cb64r2-n.hdf
- cb64r3-n.hdf
+ ctxtr2.hdf
+ ctxtr3.hdf
+ cb32i2.hdf
+ cb32i3.hdf
+ cb16i2.hdf
+ cb16i3.hdf
+ cb32r2.hdf
+ cb32r3.hdf
+ cb64r2.hdf
+ cb64r3.hdf
+ cb64r2-n.hdf
+ cb64r3-n.hdf
ctxtr2_ris.hdf
cb64r2_ris.hdf
- ctxtr2.tmp
- ctxtr3.tmp
- cb32i2.tmp
- cb32i3.tmp
- cb16i2.tmp
- cb16i3.tmp
- cb32r2.tmp
- cb32r3.tmp
- cb64r2.tmp
- cb64r3.tmp
- cb64r2-n.tmp
- cb64r3-n.tmp
+ ctxtr2.tmp
+ ctxtr3.tmp
+ cb32i2.tmp
+ cb32i3.tmp
+ cb16i2.tmp
+ cb16i3.tmp
+ cb32r2.tmp
+ cb32r3.tmp
+ cb64r2.tmp
+ cb64r3.tmp
+ cb64r2-n.tmp
+ cb64r3-n.tmp
ctxtr2_ris.tmp
cb64r2_ris.tmp
SDSfloat2.tmp
SDSfloat3.tmp
- ctxtr2.tmp.err
- ctxtr3.tmp.err
- cb32i2.tmp.err
- cb32i3.tmp.err
- cb16i2.tmp.err
- cb16i3.tmp.err
- cb32r2.tmp.err
- cb32r3.tmp.err
- cb64r2.tmp.err
- cb64r3.tmp.err
- cb64r2-n.tmp.err
- cb64r3-n.tmp.err
+ ctxtr2.tmp.err
+ ctxtr3.tmp.err
+ cb32i2.tmp.err
+ cb32i3.tmp.err
+ cb16i2.tmp.err
+ cb16i3.tmp.err
+ cb32r2.tmp.err
+ cb32r3.tmp.err
+ cb64r2.tmp.err
+ cb64r3.tmp.err
+ cb64r2-n.tmp.err
+ cb64r3-n.tmp.err
ctxtr2_ris.tmp.err
cb64r2_ris.tmp.err
SDSfloat2.tmp.err
@@ -284,36 +284,36 @@
endif (NOT "${last_test}" STREQUAL "")
set (last_test "HIMPORTtest")
- # "Testing for 32-bit floating point ASCII (2D data)"
+ # "Testing for 32-bit floating point ASCII (2D data)"
ADD_H4_TEST (ctxtr2 0 ctxtr2 "")
- # "Testing for 32-bit floating point ASCII (3D data)"
+ # "Testing for 32-bit floating point ASCII (3D data)"
ADD_H4_TEST (ctxtr3 0 ctxtr3 "")
- # "Testing for 32-bit integer binary (2D data)"
+ # "Testing for 32-bit integer binary (2D data)"
ADD_H4_TEST (cb32i2 0 cb32i2 "")
- # "Testing for 32-bit integer binary (3D data)"
+ # "Testing for 32-bit integer binary (3D data)"
ADD_H4_TEST (cb32i3 0 cb32i3 "")
- # "Testing for 16-bit integer binary (2D data)"
+ # "Testing for 16-bit integer binary (2D data)"
ADD_H4_TEST (cb16i2 0 cb16i2 "")
- # "Testing for 16-bit integer (3D data)"
+ # "Testing for 16-bit integer (3D data)"
ADD_H4_TEST (cb16i3 0 cb16i3 "")
- # "Testing for 32-bit floating point binary (2D data)"
+ # "Testing for 32-bit floating point binary (2D data)"
ADD_H4_TEST (cb32r2 0 cb32r2 "")
- # "Testing for 32-bit floating point binary (3D data)"
+ # "Testing for 32-bit floating point binary (3D data)"
ADD_H4_TEST (cb32r3 0 cb32r3 "")
- # "Testing for 64-bit floating point binary (2D data) - Default Behaviour (Conversion to 32 bit FP SDS)"
+ # "Testing for 64-bit floating point binary (2D data) - Default Behaviour (Conversion to 32 bit FP SDS)"
ADD_H4_TEST (cb64r2 0 cb64r2 "")
- # "Testing for 64-bit floating point binary (3D data) - Default Behaviour (Conversion to 32-bit FP SDS)"
+ # "Testing for 64-bit floating point binary (3D data) - Default Behaviour (Conversion to 32-bit FP SDS)"
ADD_H4_TEST (cb64r3 0 cb64r3 "")
- # "Testing for 64-bit floating point binary (2D data) - Conversion to 64-bit FP SDS"
+ # "Testing for 64-bit floating point binary (2D data) - Conversion to 64-bit FP SDS"
ADD_H4_TEST (cb64r2 0 cb64r2-n "N")
- # "Testing for 64-bit floating point binary (3D data) - Conversion to 64-bit FP SDS"
+ # "Testing for 64-bit floating point binary (3D data) - Conversion to 64-bit FP SDS"
ADD_H4_TEST (cb64r3 0 cb64r3-n "N")
- # "Testing for raster options"
+ # "Testing for raster options"
ADD_H4_TEST (ctxtr2 0 ctxtr2_ris "R" -e 50 50)
ADD_H4_TEST (cb64r2 0 cb64r2_ris "R" -i 50 50 -f)
#
# test with hdf files
- # "Testing for reading from hdf files"
+ # "Testing for reading from hdf files"
ADD_H4_TEST_OUT (SDSfloat2 0)
ADD_H4_TEST_OUT (SDSfloat3 0)
diff --git a/mfhdf/hdfimport/Makefile.in b/mfhdf/hdfimport/Makefile.in
index 171b135..b2fdd9b 100644
--- a/mfhdf/hdfimport/Makefile.in
+++ b/mfhdf/hdfimport/Makefile.in
@@ -91,7 +91,20 @@ check_PROGRAMS = hdfimporttest$(EXEEXT) gen_sds_floats$(EXEEXT)
TESTS = $(am__EXEEXT_1) $(TEST_SCRIPT)
subdir = mfhdf/hdfimport
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
-am__aclocal_m4_deps = $(top_srcdir)/configure.ac
+am__aclocal_m4_deps = $(top_srcdir)/m4/ax_check_class.m4 \
+ $(top_srcdir)/m4/ax_check_classpath.m4 \
+ $(top_srcdir)/m4/ax_check_java_home.m4 \
+ $(top_srcdir)/m4/ax_check_junit.m4 \
+ $(top_srcdir)/m4/ax_java_options.m4 \
+ $(top_srcdir)/m4/ax_jni_include_dir.m4 \
+ $(top_srcdir)/m4/ax_prog_jar.m4 \
+ $(top_srcdir)/m4/ax_prog_java.m4 \
+ $(top_srcdir)/m4/ax_prog_java_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javac.m4 \
+ $(top_srcdir)/m4/ax_prog_javac_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javadoc.m4 \
+ $(top_srcdir)/m4/ax_try_compile_java.m4 \
+ $(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
mkinstalldirs = $(SHELL) $(top_srcdir)/bin/mkinstalldirs
@@ -425,12 +438,27 @@ FFLAGS = @FFLAGS@
FGREP = @FGREP@
FLIBS = @FLIBS@
GREP = @GREP@
+H4_CLASSPATH = @H4_CLASSPATH@
+H4_JAVACFLAGS = @H4_JAVACFLAGS@
+H4_JAVAFLAGS = @H4_JAVAFLAGS@
H4_VERSION = @H4_VERSION@
+HDF_JAVA = @HDF_JAVA@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
INSTALL_PROGRAM = @INSTALL_PROGRAM@
INSTALL_SCRIPT = @INSTALL_SCRIPT@
INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+JAR = @JAR@
+JAVA = @JAVA@
+JAVAC = @JAVAC@
+JAVACFLAGS = @JAVACFLAGS@
+JAVADOC = @JAVADOC@
+JAVAFLAGS = @JAVAFLAGS@
+JAVAPREFIX = @JAVAPREFIX@
+JAVA_JUNIT = @JAVA_JUNIT@
+JAVA_PATH_NAME = @JAVA_PATH_NAME@
+JNIFLAGS = @JNIFLAGS@
+JUNIT = @JUNIT@
LD = @LD@
LDFLAGS = @LDFLAGS@
LEX = @LEX@
@@ -473,11 +501,14 @@ STRIP = @STRIP@
SZIP_HAS_ENCODER = @SZIP_HAS_ENCODER@
SZIP_INFO = @SZIP_INFO@
TBL = @TBL@
+TESTS_JUNIT = @TESTS_JUNIT@
TEST_FORTRAN_NETCDF = @TEST_FORTRAN_NETCDF@
UNAME_INFO = @UNAME_INFO@
USE_COMP_SZIP = @USE_COMP_SZIP@
+UUDECODE = @UUDECODE@
VERSION = @VERSION@
YACC = @YACC@
+_ACJNI_JAVAC = @_ACJNI_JAVAC@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/mfhdf/hdiff/CMakeLists.txt b/mfhdf/hdiff/CMakeLists.txt
index 009b2dd..c50f79c 100644
--- a/mfhdf/hdiff/CMakeLists.txt
+++ b/mfhdf/hdiff/CMakeLists.txt
@@ -1,5 +1,5 @@
-cmake_minimum_required (VERSION 2.8.10)
-PROJECT (HDF4_MFHDF_HDIFF)
+cmake_minimum_required (VERSION 3.1.0)
+project (HDF4_MFHDF_HDIFF)
INCLUDE_DIRECTORIES (${HDF4_HDFSOURCE_DIR})
INCLUDE_DIRECTORIES (${HDF4_MFHDFSOURCE_DIR})
@@ -30,15 +30,15 @@ set (hdiff_SRCS
${HDF4_MFHDF_HDIFF_SOURCE_DIR}/hdiff_dim.c
${HDF4_SOURCE_DIR}/mfhdf/util/getopt.c
)
-
-ADD_EXECUTABLE(hdiff ${hdiff_SRCS})
-TARGET_C_PROPERTIES (hdiff " " " ")
+
+add_executable(hdiff ${hdiff_SRCS})
+TARGET_C_PROPERTIES (hdiff STATIC " " " ")
if (HDF4_BUILD_XDR_LIB)
TARGET_LINK_LIBRARIES(hdiff ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS} ${HDF4_MF_XDR_LIB_TARGET})
else (HDF4_BUILD_XDR_LIB)
TARGET_LINK_LIBRARIES(hdiff ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
endif (HDF4_BUILD_XDR_LIB)
-TARGET_NAMING (hdiff ${LIB_TYPE})
+TARGET_NAMING (hdiff STATIC)
if (BUILD_TESTING)
include (CMakeTests.cmake)
@@ -59,8 +59,5 @@ INSTALL_PROGRAM_PDB (hdiff ${HDF4_INSTALL_TOOLS_BIN_DIR} toolsapplications)
INSTALL (
TARGETS
hdiff
- RUNTIME DESTINATION
- ${HDF4_INSTALL_TOOLS_BIN_DIR}
- COMPONENT
- toolsapplications
+ RUNTIME DESTINATION ${HDF4_INSTALL_TOOLS_BIN_DIR} COMPONENT toolsapplications
)
diff --git a/mfhdf/hdiff/CMakeTests.cmake b/mfhdf/hdiff/CMakeTests.cmake
index 283cf06..cd9246c 100644
--- a/mfhdf/hdiff/CMakeTests.cmake
+++ b/mfhdf/hdiff/CMakeTests.cmake
@@ -13,9 +13,9 @@
set (hdifftst_SRCS
${HDF4_MFHDF_HDIFF_SOURCE_DIR}/hdifftst.c
)
-
+
ADD_EXECUTABLE (hdifftst ${hdifftst_SRCS})
- TARGET_C_PROPERTIES (hdifftst " " " ")
+ TARGET_C_PROPERTIES (hdifftst STATIC " " " ")
if (HDF4_BUILD_XDR_LIB)
target_link_libraries (hdifftst ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS} ${HDF4_MF_XDR_LIB_TARGET})
else (HDF4_BUILD_XDR_LIB)
@@ -27,7 +27,7 @@
add_test (
NAME HDIFF-GEN-clearall-objects
COMMAND ${CMAKE_COMMAND}
- -E remove
+ -E remove
hdifftst1.hdf
hdifftst2.hdf
hdifftst3.hdf
@@ -37,7 +37,7 @@
hdifftst7.hdf
)
set (last_test "HDIFF-GEN-clearall-objects")
-
+
add_test (NAME hdifftst COMMAND $<TARGET_FILE:hdifftst>)
set_tests_properties (hdifftst PROPERTIES DEPENDS ${last_test} LABELS ${PROJECT_NAME})
set (last_test "hdifftst")
@@ -71,7 +71,7 @@
endif (NOT "${last_test}" STREQUAL "")
set (last_test "HDIFF-${resultfile}")
ENDMACRO (ADD_H4_TEST file)
-
+
#-- Copy all the data files from the test directory into the source directory
set (HDF4_REFERENCE_TEST_FILES
hdifftst1.hdf
@@ -99,12 +99,12 @@
hdiff_14.txt
hdiff_15.txt
)
-
+
foreach (h4_file ${HDF4_REFERENCE_TEST_FILES})
set (dest "${PROJECT_BINARY_DIR}/testfiles/${h4_file}")
#MESSAGE(STATUS " Copying ${HDF4_MFHDF_HDIFF_SOURCE_DIR}/testfiles/${h4_file} to ${PROJECT_BINARY_DIR}/testfiles/")
ADD_CUSTOM_COMMAND (
- TARGET hdiff
+ TARGET hdiff
POST_BUILD
COMMAND ${CMAKE_COMMAND}
ARGS -E copy_if_different ${HDF4_MFHDF_HDIFF_SOURCE_DIR}/testfiles/${h4_file} ${dest}
@@ -122,7 +122,7 @@
)
endforeach (out_file ${HDF4_REFERENCE_FILES})
- if (WIN32)
+ if (WIN32 AND MSVC_VERSION LESS 1900)
ADD_CUSTOM_COMMAND (
TARGET hdiff
POST_BUILD
@@ -136,7 +136,7 @@
COMMAND ${CMAKE_COMMAND}
ARGS -E copy_if_different ${HDF4_MFHDF_HDIFF_SOURCE_DIR}/testfiles/hdiff_06.txt ${PROJECT_BINARY_DIR}/testfiles/hdiff_06.txt
)
- endif (WIN32)
+ endif (WIN32 AND MSVC_VERSION LESS 1900)
##############################################################################
##############################################################################
@@ -147,7 +147,7 @@
add_test (
NAME HDIFF-clearall-objects
COMMAND ${CMAKE_COMMAND}
- -E remove
+ -E remove
hdiff_01.out
hdiff_02.out
hdiff_03.out
@@ -187,7 +187,7 @@
set (last_test "HDIFF-clearall-objects")
# help message
- ADD_H4_TEST (hdiff_01 1)
+ ADD_H4_TEST (hdiff_01 1)
# Compare global attributes only
ADD_H4_TEST (hdiff_02 1 -g hdifftst1.hdf hdifftst2.hdf)
@@ -207,7 +207,7 @@
# Compare SD data on variable(s)
ADD_H4_TEST (hdiff_07 1 -d -v dset1 hdifftst1.hdf hdifftst2.hdf)
- # Compare vdata on variable(s)
+ # Compare vdata on variable(s)
ADD_H4_TEST (hdiff_08 1 -D -u vdata1 hdifftst1.hdf hdifftst2.hdf)
# Print difference up to count number
@@ -222,7 +222,7 @@
# percent (relative)
ADD_H4_TEST (hdiff_12 1 -d -p 0.05 -v dset3 hdifftst1.hdf hdifftst2.hdf)
- # hyperslab reading
+ # hyperslab reading
ADD_H4_TEST (hdiff_13 0 hdifftst3.hdf hdifftst4.hdf)
# lone dim
diff --git a/mfhdf/hdiff/Makefile.in b/mfhdf/hdiff/Makefile.in
index 15f6679..b28f1f9 100644
--- a/mfhdf/hdiff/Makefile.in
+++ b/mfhdf/hdiff/Makefile.in
@@ -91,7 +91,20 @@ noinst_PROGRAMS = hdifftst$(EXEEXT)
TESTS = $(am__EXEEXT_1) $(TEST_SCRIPT)
subdir = mfhdf/hdiff
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
-am__aclocal_m4_deps = $(top_srcdir)/configure.ac
+am__aclocal_m4_deps = $(top_srcdir)/m4/ax_check_class.m4 \
+ $(top_srcdir)/m4/ax_check_classpath.m4 \
+ $(top_srcdir)/m4/ax_check_java_home.m4 \
+ $(top_srcdir)/m4/ax_check_junit.m4 \
+ $(top_srcdir)/m4/ax_java_options.m4 \
+ $(top_srcdir)/m4/ax_jni_include_dir.m4 \
+ $(top_srcdir)/m4/ax_prog_jar.m4 \
+ $(top_srcdir)/m4/ax_prog_java.m4 \
+ $(top_srcdir)/m4/ax_prog_java_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javac.m4 \
+ $(top_srcdir)/m4/ax_prog_javac_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javadoc.m4 \
+ $(top_srcdir)/m4/ax_try_compile_java.m4 \
+ $(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
mkinstalldirs = $(SHELL) $(top_srcdir)/bin/mkinstalldirs
@@ -421,12 +434,27 @@ FFLAGS = @FFLAGS@
FGREP = @FGREP@
FLIBS = @FLIBS@
GREP = @GREP@
+H4_CLASSPATH = @H4_CLASSPATH@
+H4_JAVACFLAGS = @H4_JAVACFLAGS@
+H4_JAVAFLAGS = @H4_JAVAFLAGS@
H4_VERSION = @H4_VERSION@
+HDF_JAVA = @HDF_JAVA@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
INSTALL_PROGRAM = @INSTALL_PROGRAM@
INSTALL_SCRIPT = @INSTALL_SCRIPT@
INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+JAR = @JAR@
+JAVA = @JAVA@
+JAVAC = @JAVAC@
+JAVACFLAGS = @JAVACFLAGS@
+JAVADOC = @JAVADOC@
+JAVAFLAGS = @JAVAFLAGS@
+JAVAPREFIX = @JAVAPREFIX@
+JAVA_JUNIT = @JAVA_JUNIT@
+JAVA_PATH_NAME = @JAVA_PATH_NAME@
+JNIFLAGS = @JNIFLAGS@
+JUNIT = @JUNIT@
LD = @LD@
LDFLAGS = @LDFLAGS@
LEX = @LEX@
@@ -469,11 +497,14 @@ STRIP = @STRIP@
SZIP_HAS_ENCODER = @SZIP_HAS_ENCODER@
SZIP_INFO = @SZIP_INFO@
TBL = @TBL@
+TESTS_JUNIT = @TESTS_JUNIT@
TEST_FORTRAN_NETCDF = @TEST_FORTRAN_NETCDF@
UNAME_INFO = @UNAME_INFO@
USE_COMP_SZIP = @USE_COMP_SZIP@
+UUDECODE = @UUDECODE@
VERSION = @VERSION@
YACC = @YACC@
+_ACJNI_JAVAC = @_ACJNI_JAVAC@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/mfhdf/hdiff/hdiff_dim.c b/mfhdf/hdiff/hdiff_dim.c
index 5f8d565..a593aaa 100644
--- a/mfhdf/hdiff/hdiff_dim.c
+++ b/mfhdf/hdiff/hdiff_dim.c
@@ -373,8 +373,8 @@ void diff_match_dim_table_add (diff_match_dim_table_t *table,
if (table->nobjs == table->size)
{
table->size *= 2;
- table->objs = (diff_match_dim_name_t*)realloc(table->objs, table->size * sizeof(diff_match_dim_name_t));
-
+ table->objs = (diff_match_dim_name_t*)HDrealloc(table->objs, table->size * sizeof(diff_match_dim_name_t));
+
for (i = table->nobjs; i < table->size; i++) {
table->objs[i].ref = -1;
table->objs[i].flags[0] = table->objs[i].flags[1] = -1;
@@ -383,7 +383,7 @@ void diff_match_dim_table_add (diff_match_dim_table_t *table,
i = table->nobjs++;
table->objs[i].ref = ref;
- strcpy(table->objs[i].dim_name,dim_name);
+ HDstrcpy(table->objs[i].dim_name,dim_name);
table->objs[i].flags[0] = flags[0];
table->objs[i].flags[1] = flags[1];
}
@@ -407,12 +407,12 @@ static
void diff_match_dim_table_init( diff_match_dim_table_t **tbl )
{
int i;
- diff_match_dim_table_t *table = (diff_match_dim_table_t*) malloc(sizeof(diff_match_dim_table_t));
-
+ diff_match_dim_table_t *table = (diff_match_dim_table_t*)HDmalloc(sizeof(diff_match_dim_table_t));
+
table->size = 20;
table->nobjs = 0;
- table->objs = (diff_match_dim_name_t*) malloc(table->size * sizeof(diff_match_dim_name_t));
-
+ table->objs = (diff_match_dim_name_t*)HDmalloc(table->size * sizeof(diff_match_dim_name_t));
+
for (i = 0; i < table->size; i++) {
table->objs[i].ref = -1;
table->objs[i].flags[0] = table->objs[i].flags[1] = -1;
@@ -440,8 +440,8 @@ void diff_match_dim_table_init( diff_match_dim_table_t **tbl )
static
void diff_match_dim_table_free( diff_match_dim_table_t *table )
{
- free(table->objs);
- free(table);
+ HDfree(table->objs);
+ HDfree(table);
}
@@ -471,8 +471,8 @@ void diff_dim_table_add(diff_dim_table_t *table,
if (table->nobjs == table->size)
{
table->size *= 2;
- table->objs = (diff_dim_name_t*)realloc(table->objs, table->size * sizeof(diff_dim_name_t));
-
+ table->objs = (diff_dim_name_t*)HDrealloc(table->objs, table->size * sizeof(diff_dim_name_t));
+
for (i = table->nobjs; i < table->size; i++) {
table->objs[i].ref = -1;
}
@@ -480,7 +480,7 @@ void diff_dim_table_add(diff_dim_table_t *table,
i = table->nobjs++;
table->objs[i].ref = ref;
- strcpy(table->objs[i].dim_name,name);
+ HDstrcpy(table->objs[i].dim_name,name);
}
@@ -502,12 +502,12 @@ void diff_dim_table_add(diff_dim_table_t *table,
void diff_dim_table_init( diff_dim_table_t **tbl )
{
int i;
- diff_dim_table_t* table = (diff_dim_table_t*) malloc(sizeof(diff_dim_table_t));
-
+ diff_dim_table_t* table = (diff_dim_table_t*)HDmalloc(sizeof(diff_dim_table_t));
+
table->size = 20;
table->nobjs = 0;
- table->objs = (diff_dim_name_t*) malloc(table->size * sizeof(diff_dim_name_t));
-
+ table->objs = (diff_dim_name_t*)HDmalloc(table->size * sizeof(diff_dim_name_t));
+
for (i = 0; i < table->size; i++) {
table->objs[i].ref = -1;
}
@@ -531,7 +531,7 @@ void diff_dim_table_init( diff_dim_table_t **tbl )
void diff_dim_table_free( diff_dim_table_t *table )
{
- free(table->objs);
- free(table);
+ HDfree(table->objs);
+ HDfree(table);
}
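[Note: the hdiff_dim.c hunks above route allocation and string copies through the HDF4 HD* wrappers (HDmalloc, HDrealloc, HDfree, HDstrcpy). A minimal standalone sketch of the grow-by-doubling table pattern they touch follows; it is illustrative only, and the HD* names are mapped to the standard C functions here so the example compiles on its own.]

    #include <stdlib.h>
    #include <string.h>

    /* Stand-ins for the HDF4 memory wrappers used in hdiff_dim.c. */
    #define HDmalloc(s)      malloc(s)
    #define HDrealloc(p, s)  realloc(p, s)
    #define HDfree(p)        free(p)

    typedef struct { int ref; char name[64]; } entry_t;
    typedef struct { int size, nobjs; entry_t *objs; } table_t;

    static void table_init(table_t *t)
    {
        t->size  = 20;
        t->nobjs = 0;
        t->objs  = (entry_t *)HDmalloc((size_t)t->size * sizeof(entry_t));
    }

    static void table_add(table_t *t, int ref, const char *name)
    {
        if (t->nobjs == t->size) {              /* grow by doubling, as in the diff */
            t->size *= 2;
            t->objs  = (entry_t *)HDrealloc(t->objs, (size_t)t->size * sizeof(entry_t));
        }
        t->objs[t->nobjs].ref = ref;
        strncpy(t->objs[t->nobjs].name, name, sizeof(t->objs[t->nobjs].name) - 1);
        t->objs[t->nobjs].name[sizeof(t->objs[t->nobjs].name) - 1] = '\0';
        t->nobjs++;
    }

    static void table_free(table_t *t)
    {
        HDfree(t->objs);
    }

    int main(void)
    {
        table_t t;
        table_init(&t);
        table_add(&t, 2, "fakeDim0");   /* ref/name values are hypothetical */
        table_free(&t);
        return 0;
    }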
diff --git a/mfhdf/hdiff/hdiff_gr.c b/mfhdf/hdiff/hdiff_gr.c
index 1ee5269..f7f38a2 100644
--- a/mfhdf/hdiff/hdiff_gr.c
+++ b/mfhdf/hdiff/hdiff_gr.c
@@ -281,8 +281,8 @@ uint32 diff_gr( int32 gr1_id,
GRendaccess(ri1_id);
GRendaccess(ri2_id);
- if (buf1) free(buf1);
- if (buf2) free(buf2);
+ if (buf1) HDfree(buf1);
+ if (buf2) HDfree(buf2);
return nfound;
@@ -297,8 +297,8 @@ out:
GRendaccess(ri1_id);
if (ri2_id!=-1)
GRendaccess(ri2_id);
- if (buf1) free(buf1);
- if (buf2) free(buf2);
+ if (buf1) HDfree(buf1);
+ if (buf2) HDfree(buf2);
return 0;
}
diff --git a/mfhdf/hdiff/hdiff_list.c b/mfhdf/hdiff/hdiff_list.c
index 5f806a7..21f223c 100644
--- a/mfhdf/hdiff/hdiff_list.c
+++ b/mfhdf/hdiff/hdiff_list.c
@@ -195,8 +195,8 @@ int hdiff_list_vg(const char* fname,
* use the nlones returned to allocate sufficient space for the
* buffer ref_array to hold the reference numbers of all lone vgroups,
*/
- ref_array = (int32 *) malloc(sizeof(int32) * nlones);
-
+ ref_array = (int32 *)HDmalloc(sizeof(int32) * nlones);
+
/*
* and call Vlone again to retrieve the reference numbers into
* the buffer ref_array.
@@ -297,8 +297,8 @@ int hdiff_list_vg(const char* fname,
ntagrefs = Vntagrefs(vg_id);
if ( ntagrefs > 0 )
{
- tags = (int32 *) malloc(sizeof(int32) * ntagrefs);
- refs = (int32 *) malloc(sizeof(int32) * ntagrefs);
+ tags = (int32 *)HDmalloc(sizeof(int32) * ntagrefs);
+ refs = (int32 *)HDmalloc(sizeof(int32) * ntagrefs);
Vgettagrefs(vg_id, tags, refs, ntagrefs);
insert_vg(fname,
@@ -312,11 +312,11 @@ int hdiff_list_vg(const char* fname,
table,
td1,
td2);
-
- if (tags )
- free (tags);
- if (refs)
- free (refs);
+
+ if (tags )
+ HDfree(tags);
+ if (refs)
+ HDfree(refs);
}
if(Vdetach (vg_id)==FAIL)
@@ -325,14 +325,14 @@ int hdiff_list_vg(const char* fname,
goto out;
}
- free (vg_name);
-
+ HDfree(vg_name);
+
} /* for */
/* free the space allocated */
- if (ref_array)
- free (ref_array);
+ if (ref_array)
+ HDfree(ref_array);
} /* if */
@@ -348,8 +348,8 @@ int hdiff_list_vg(const char* fname,
out:
Vend (file_id);
- if (ref_array)
- free (ref_array);
+ if (ref_array)
+ HDfree(ref_array);
return FAIL;
@@ -456,8 +456,8 @@ int insert_vg(const char* fname,
ntagrefs = Vntagrefs(vg_id);
if ( ntagrefs > 0 )
{
- tags = (int32 *) malloc(sizeof(int32) * ntagrefs);
- refs = (int32 *) malloc(sizeof(int32) * ntagrefs);
+ tags = (int32 *)HDmalloc(sizeof(int32) * ntagrefs);
+ refs = (int32 *)HDmalloc(sizeof(int32) * ntagrefs);
Vgettagrefs(vg_id, tags, refs, ntagrefs);
/* recurse */
@@ -472,17 +472,17 @@ int insert_vg(const char* fname,
table,
td1,
td2);
-
- free (tags);
- free (refs);
+
+ HDfree(tags);
+ HDfree(refs);
}
if(Vdetach (vg_id)==FAIL)
{
printf("Error: Could not detach group <%s>\n", vg_name);
}
if (path)
- free(path);
-
+ HDfree(path);
+
break;
@@ -702,8 +702,8 @@ int hdiff_list_vs(int32 file_id,
* use the nlones returned to allocate sufficient space for the
* buffer ref_array to hold the reference numbers of all lone vgroups,
*/
- ref_array = (int32 *) malloc(sizeof(int32) * nlones);
-
+ ref_array = (int32 *)HDmalloc(sizeof(int32) * nlones);
+
/*
* and call VSlone again to retrieve the reference numbers into
* the buffer ref_array.
@@ -735,7 +735,7 @@ int hdiff_list_vs(int32 file_id,
/* free the space allocated */
- if (ref_array) free (ref_array);
+ if (ref_array) HDfree (ref_array);
} /* if */
/* terminate access to the VS interface */
@@ -1134,8 +1134,8 @@ int insert_sds(int32 file_id,
SDendaccess(sds_id);
if (path)
- free(path);
-
+ HDfree(path);
+
return 0;
}
@@ -1304,8 +1304,8 @@ int insert_gr(int32 file_id,
GRendaccess(ri_id);
if (path)
- free(path);
-
+ HDfree(path);
+
return 0;
}
@@ -1450,8 +1450,8 @@ out:
VSdetach (vdata_id);
if (path)
- free(path);
-
+ HDfree(path);
+
return ret;
}
@@ -1488,7 +1488,7 @@ int is_reserved(char*vg_class)
}
/* class and name(partial) for chunk table i.e. Vdata */
- if( (strncmp(vg_class,"_HDF_CHK_TBL_",13)==0))
+ if( (HDstrncmp(vg_class,"_HDF_CHK_TBL_",13)==0))
{
ret=1;
}
@@ -1518,15 +1518,15 @@ char *get_path(char*path_name,
/* initialize path */
if (path_name!=NULL)
{
- path = (char*) malloc(strlen(path_name) + strlen(obj_name) + 2);
- strcpy( path, path_name );
- strcat( path, "/" );
- strcat( path, obj_name );
+ path = (char*)HDmalloc(strlen(path_name) + strlen(obj_name) + 2);
+ HDstrcpy( path, path_name );
+ HDstrcat( path, "/" );
+ HDstrcat( path, obj_name );
}
else
{
- path = (char*) malloc(strlen(obj_name) + 1);
- strcpy( path, obj_name );
+ path = (char*)HDmalloc(strlen(obj_name) + 1);
+ HDstrcpy( path, obj_name );
}
return path;
}
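[Note: the hdiff_list.c hunk above ends with get_path(), which builds a "parent/child" object path with HDmalloc/HDstrcpy/HDstrcat. A minimal standalone sketch of that pattern follows; it is illustrative only, and the HD* names are mapped to the standard C functions so the example compiles on its own.]

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    /* Stand-ins for the HDF4 string/memory wrappers used in hdiff_list.c. */
    #define HDmalloc(s)  malloc(s)
    #define HDstrcpy     strcpy
    #define HDstrcat     strcat
    #define HDfree(p)    free(p)

    /* Build "path_name/obj_name", or just "obj_name" at the root, as get_path does. */
    static char *make_path(const char *path_name, const char *obj_name)
    {
        char *path;
        if (path_name != NULL) {
            path = (char *)HDmalloc(strlen(path_name) + strlen(obj_name) + 2);
            HDstrcpy(path, path_name);
            HDstrcat(path, "/");
            HDstrcat(path, obj_name);
        } else {
            path = (char *)HDmalloc(strlen(obj_name) + 1);
            HDstrcpy(path, obj_name);
        }
        return path;
    }

    int main(void)
    {
        char *p = make_path("/group1", "dset1");   /* names are illustrative */
        printf("%s\n", p);                         /* prints /group1/dset1 */
        HDfree(p);
        return 0;
    }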
diff --git a/mfhdf/hdiff/hdiff_mattbl.c b/mfhdf/hdiff/hdiff_mattbl.c
index c12abe3..23d8e13 100644
--- a/mfhdf/hdiff/hdiff_mattbl.c
+++ b/mfhdf/hdiff/hdiff_mattbl.c
@@ -49,8 +49,8 @@ void match_table_add (match_table_t *table,
if (table->nobjs == table->size) {
table->size *= 2;
- table->objs = (match_info_t*)realloc(table->objs, table->size * sizeof(match_info_t));
-
+ table->objs = (match_info_t*)HDrealloc(table->objs, table->size * sizeof(match_info_t));
+
for (i = table->nobjs; i < table->size; i++) {
table->objs[i].tag1 = table->objs[i].ref1 = -1;
table->objs[i].tag2 = table->objs[i].ref2 = -1;
@@ -63,7 +63,7 @@ void match_table_add (match_table_t *table,
table->objs[i].ref1 = ref1;
table->objs[i].tag2 = tag2;
table->objs[i].ref2 = ref2;
- strcpy(table->objs[i].obj_name,path);
+ HDstrcpy(table->objs[i].obj_name,path);
table->objs[i].flags[0] = flags[0];
table->objs[i].flags[1] = flags[1];
}
@@ -86,12 +86,12 @@ void match_table_add (match_table_t *table,
void match_table_init( match_table_t **tbl )
{
uint32 i;
- match_table_t* table = (match_table_t*) malloc(sizeof(match_table_t));
-
+ match_table_t* table = (match_table_t*)HDmalloc(sizeof(match_table_t));
+
table->size = 20;
table->nobjs = 0;
- table->objs = (match_info_t*) malloc(table->size * sizeof(match_info_t));
-
+ table->objs = (match_info_t*)HDmalloc(table->size * sizeof(match_info_t));
+
for (i = 0; i < table->size; i++) {
table->objs[i].tag1 = table->objs[i].ref1 = -1;
table->objs[i].tag2 = table->objs[i].ref2 = -1;
@@ -119,8 +119,8 @@ void match_table_init( match_table_t **tbl )
void match_table_free( match_table_t *table )
{
- free(table->objs);
- free(table);
+ HDfree(table->objs);
+ HDfree(table);
}
diff --git a/mfhdf/hdiff/hdiff_sds.c b/mfhdf/hdiff/hdiff_sds.c
index fb6acda..ecf1b06 100644
--- a/mfhdf/hdiff/hdiff_sds.c
+++ b/mfhdf/hdiff/hdiff_sds.c
@@ -363,7 +363,7 @@ uint32 diff_sds(int32 sd1_id,
sm_nelmts = sm_nbytes / p_type_nbytes;
/* the stripmine loop */
- memset(hs_offset, 0, sizeof hs_offset);
+ HDmemset(hs_offset, 0, sizeof hs_offset);
for (elmtno = 0; elmtno < p_nelmts; elmtno += hs_nelmts)
{
@@ -440,12 +440,12 @@ uint32 diff_sds(int32 sd1_id,
/* free */
if (sm_buf1!=NULL)
{
- free(sm_buf1);
+ HDfree(sm_buf1);
sm_buf1=NULL;
}
if (sm_buf2!=NULL)
{
- free(sm_buf2);
+ HDfree(sm_buf2);
sm_buf2=NULL;
}
@@ -552,12 +552,12 @@ uint32 diff_sds_attrs(int32 sds1_id,
continue;
}
- attr1_buf = (void *) malloc((unsigned)nelms1*DFKNTsize(dtype1 | DFNT_NATIVE));
+ attr1_buf = (void *)HDmalloc((unsigned)nelms1*DFKNTsize(dtype1 | DFNT_NATIVE));
if (!attr1_buf) {
printf("Out of memory!");
goto out;;
}
- attr2_buf = (void *) malloc((unsigned)nelms2*DFKNTsize(dtype2 | DFNT_NATIVE));
+ attr2_buf = (void *)HDmalloc((unsigned)nelms2*DFKNTsize(dtype2 | DFNT_NATIVE));
if (!attr2_buf) {
printf("Out of memory!");
goto out;
@@ -588,8 +588,8 @@ uint32 diff_sds_attrs(int32 sds1_id,
nfound++;
}
- if (attr1_buf) free(attr1_buf);
- if (attr2_buf) free(attr2_buf);
+ if (attr1_buf) HDfree(attr1_buf);
+ if (attr2_buf) HDfree(attr2_buf);
}
@@ -598,10 +598,10 @@ uint32 diff_sds_attrs(int32 sds1_id,
out:
- if (attr1_buf) free(attr1_buf);
- if (attr2_buf) free(attr2_buf);
- opt->err_stat = 1;
- return 0;
+ if (attr1_buf) HDfree(attr1_buf);
+ if (attr2_buf) HDfree(attr2_buf);
+ opt->err_stat = 1;
+ return 0;
}
diff --git a/mfhdf/hdiff/hdiff_vs.c b/mfhdf/hdiff/hdiff_vs.c
index 1bbf1bb..37d3cfd 100644
--- a/mfhdf/hdiff/hdiff_vs.c
+++ b/mfhdf/hdiff/hdiff_vs.c
@@ -246,8 +246,8 @@ static uint32 vdata_cmp(int32 vs1,
/* compare the data */
- buf1 = (uint8 *) malloc((unsigned) (nv1 * vsize1));
- buf2 = (uint8 *) malloc((unsigned) (nv2 * vsize2));
+ buf1 = (uint8 *)HDmalloc((unsigned) (nv1 * vsize1));
+ buf2 = (uint8 *)HDmalloc((unsigned) (nv2 * vsize2));
if (!buf1 || !buf2)
{
printf("Out of memory!");
@@ -280,7 +280,7 @@ static uint32 vdata_cmp(int32 vs1,
{
for (i=0; i<nv1; i++)
{
- if (memcmp(b1, b2, (size_t)vsize1) == 0)
+ if (HDmemcmp(b1, b2, (size_t)vsize1) == 0)
{
b1 += vsize1;
b2 += vsize2;
@@ -376,8 +376,8 @@ static uint32 vdata_cmp(int32 vs1,
}
- if (buf1)free((char *) buf1);
- if (buf2)free((char *) buf2);
+ if (buf1) HDfree((char *) buf1);
+ if (buf2) HDfree((char *) buf2);
return nfound;
}
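[Note: the hdiff_vs.c hunks above switch vdata_cmp() to the HDF4 HDmemcmp/HDmalloc/HDfree wrappers. A minimal standalone sketch of its record-by-record buffer comparison follows; it is illustrative only, with HDmemcmp mapped to memcmp so the example compiles on its own.]

    #include <stdio.h>
    #include <string.h>

    #define HDmemcmp memcmp   /* stand-in for the HDF4 wrapper used in hdiff_vs.c */

    /* Count records that differ between two buffers of nrec records of rsize
     * bytes each, mirroring the inner loop of vdata_cmp. */
    static unsigned count_diffs(const unsigned char *b1, const unsigned char *b2,
                                int nrec, size_t rsize)
    {
        unsigned nfound = 0;
        int i;
        for (i = 0; i < nrec; i++) {
            if (HDmemcmp(b1, b2, rsize) != 0)
                nfound++;
            b1 += rsize;
            b2 += rsize;
        }
        return nfound;
    }

    int main(void)
    {
        unsigned char a[6] = {1, 2, 3, 4, 5, 6};
        unsigned char b[6] = {1, 2, 9, 4, 5, 6};
        printf("%u record(s) differ\n", count_diffs(a, b, 3, 2));   /* prints 1 */
        return 0;
    }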
diff --git a/mfhdf/hrepack/CMakeLists.txt b/mfhdf/hrepack/CMakeLists.txt
index fa4b4d6..7e7a95d 100644
--- a/mfhdf/hrepack/CMakeLists.txt
+++ b/mfhdf/hrepack/CMakeLists.txt
@@ -1,5 +1,5 @@
-cmake_minimum_required (VERSION 2.8.10)
-PROJECT (HDF4_MFHDF_HREPACK)
+cmake_minimum_required (VERSION 3.1.0)
+project (HDF4_MFHDF_HREPACK)
INCLUDE_DIRECTORIES (${HDF4_HDFSOURCE_DIR})
INCLUDE_DIRECTORIES (${HDF4_MFHDFSOURCE_DIR})
@@ -33,24 +33,24 @@ set (hrepack_SRCS
${HDF4_MFHDF_HREPACK_SOURCE_DIR}/hrepack_vs.c
${HDF4_MFHDF_HREPACK_SOURCE_DIR}/hrepack_dim.c
)
-
-ADD_EXECUTABLE (hrepack ${hrepack_SRCS})
-TARGET_C_PROPERTIES (hrepack " " " ")
+
+add_executable (hrepack ${hrepack_SRCS})
+TARGET_C_PROPERTIES (hrepack STATIC " " " ")
if (HDF4_BUILD_XDR_LIB)
target_link_libraries (hrepack ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS} ${HDF4_MF_XDR_LIB_TARGET})
else (HDF4_BUILD_XDR_LIB)
target_link_libraries (hrepack ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
endif (HDF4_BUILD_XDR_LIB)
-TARGET_NAMING (hrepack ${LIB_TYPE})
+TARGET_NAMING (hrepack STATIC)
-ADD_EXECUTABLE (hrepack_check ${HDF4_MFHDF_HREPACK_SOURCE_DIR}/hrepack_check.c)
-TARGET_C_PROPERTIES (hrepack_check " " " ")
+add_executable (hrepack_check ${HDF4_MFHDF_HREPACK_SOURCE_DIR}/hrepack_check.c)
+TARGET_C_PROPERTIES (hrepack_check STATIC " " " ")
if (HDF4_BUILD_XDR_LIB)
target_link_libraries (hrepack_check ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS} ${HDF4_MF_XDR_LIB_TARGET})
else (HDF4_BUILD_XDR_LIB)
target_link_libraries (hrepack_check ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
endif (HDF4_BUILD_XDR_LIB)
-TARGET_NAMING (hrepack_check ${LIB_TYPE})
+TARGET_NAMING (hrepack_check STATIC)
if (BUILD_TESTING)
include (CMakeTests.cmake)
@@ -71,8 +71,5 @@ INSTALL_PROGRAM_PDB (hrepack ${HDF4_INSTALL_TOOLS_BIN_DIR} toolsapplications)
INSTALL (
TARGETS
hrepack hrepack_check
- RUNTIME DESTINATION
- ${HDF4_INSTALL_TOOLS_BIN_DIR}
- COMPONENT
- toolsapplications
+ RUNTIME DESTINATION ${HDF4_INSTALL_TOOLS_BIN_DIR} COMPONENT toolsapplications
)
diff --git a/mfhdf/hrepack/CMakeTests.cmake b/mfhdf/hrepack/CMakeTests.cmake
index da49967..2784424 100644
--- a/mfhdf/hrepack/CMakeTests.cmake
+++ b/mfhdf/hrepack/CMakeTests.cmake
@@ -14,12 +14,12 @@
image8.txt
info.txt
)
-
+
foreach (h4_file ${HDF4_REPACK_TEST_FILES})
set (dest "${PROJECT_BINARY_DIR}/${h4_file}")
#message (STATUS " Copying ${HDF4_MFHDF_HREPACK_SOURCE_DIR}/${h4_file} to ${PROJECT_BINARY_DIR}/")
- ADD_CUSTOM_COMMAND (
- TARGET hrepack_check
+ add_custom_command (
+ TARGET hrepack_check
POST_BUILD
COMMAND ${CMAKE_COMMAND}
ARGS -E copy_if_different ${HDF4_MFHDF_HREPACK_SOURCE_DIR}/${h4_file} ${dest}
@@ -27,28 +27,28 @@
endforeach (h4_file ${HDF4_REPACK_TEST_FILES})
#-- Adding test for test_hrepack for generating testfiles
- ADD_EXECUTABLE (test_hrepack ${HDF4_MFHDF_HREPACK_SOURCE_DIR}/hrepacktst.c)
- TARGET_C_PROPERTIES (test_hrepack " " " ")
+ add_executable (test_hrepack ${HDF4_MFHDF_HREPACK_SOURCE_DIR}/hrepacktst.c)
+ TARGET_C_PROPERTIES (test_hrepack STATIC " " " ")
if (HDF4_BUILD_XDR_LIB)
target_link_libraries (test_hrepack ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS} ${HDF4_MF_XDR_LIB_TARGET})
else (HDF4_BUILD_XDR_LIB)
target_link_libraries (test_hrepack ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
endif (HDF4_BUILD_XDR_LIB)
- TARGET_NAMING (test_hrepack ${LIB_TYPE})
+ TARGET_NAMING (test_hrepack STATIC)
- MACRO (ADD_H4_TEST testname testtype testfile)
- if (${testtype} STREQUAL "SKIP")
+ macro (ADD_H4_TEST testname testtype testfile)
+ if ("${testtype}" STREQUAL "SKIP")
if (NOT HDF4_ENABLE_USING_MEMCHECKER)
add_test (
NAME HREPACK-${testname}-SKIPPED
COMMAND ${CMAKE_COMMAND} -E echo "SKIP -v -i ${PROJECT_BINARY_DIR}/${testfile} -o ${PROJECT_BINARY_DIR}/out-${testname}.${testfile} ${ARGN}"
)
endif (NOT HDF4_ENABLE_USING_MEMCHECKER)
- else (${testtype} STREQUAL "SKIP")
+ else ("${testtype}" STREQUAL "SKIP")
add_test (
NAME HREPACK-${testname}-clearall-objects
COMMAND ${CMAKE_COMMAND}
- -E remove
+ -E remove
out-${testname}.${testfile}
)
set_tests_properties (HREPACK-${testname}-clearall-objects PROPERTIES DEPENDS HREPACK-test_hrepack LABELS ${PROJECT_NAME})
@@ -62,8 +62,8 @@
COMMAND $<TARGET_FILE:hdiff> ${PROJECT_BINARY_DIR}/${testfile} ${PROJECT_BINARY_DIR}/out-${testname}.${testfile}
)
set_tests_properties (HREPACK-${testname}_DFF PROPERTIES DEPENDS HREPACK-${testname} LABELS ${PROJECT_NAME})
- endif (${testtype} STREQUAL "SKIP")
- ENDMACRO (ADD_H4_TEST)
+ endif ("${testtype}" STREQUAL "SKIP")
+ endmacro (ADD_H4_TEST)
##############################################################################
##############################################################################
@@ -74,7 +74,7 @@
add_test (
NAME HREPACK-hrepack-clearall-objects
COMMAND ${CMAKE_COMMAND}
- -E remove
+ -E remove
hrepack_help.out
hrepack_check_help.out
hrepacktst1.hdf
@@ -114,50 +114,50 @@
add_test (NAME HREPACK-test_hrepack COMMAND $<TARGET_FILE:test_hrepack>)
set_tests_properties (HREPACK-test_hrepack PROPERTIES DEPENDS HREPACK-hrepack-clearall-objects LABELS ${PROJECT_NAME})
-
+
set (HREPACK_FILE1 hrepacktst1.hdf)
set (HREPACK_FILE2 hrepacktst2.hdf)
set (HREPACK_FILE3 hrepacktst3.hdf)
#-------------------------------------------------------------------------
- # test1:
+ # test1:
# HUFF
#-------------------------------------------------------------------------
#
ADD_H4_TEST (HUFF "TEST" ${HREPACK_FILE1} -t "dset7:HUFF 1" -c dset7:10x8x6)
-
-# if ( sds_verifiy_comp("dset7",COMP_CODE_SKPHUFF, 1) == -1)
+
+# if ( sds_verifiy_comp("dset7",COMP_CODE_SKPHUFF, 1) == -1)
# goto out;
-# if ( sds_verifiy_chunk("dset7",HDF_CHUNK|HDF_COMP,3,in_chunk_lengths) == -1)
+# if ( sds_verifiy_chunk("dset7",HDF_CHUNK|HDF_COMP,3,in_chunk_lengths) == -1)
# goto out;
-
-
+
+
#-------------------------------------------------------------------------
- # test2:
+ # test2:
# RLE
#-------------------------------------------------------------------------
#
ADD_H4_TEST(RLE "TEST" ${HREPACK_FILE1} -t dset4:RLE -c dset4:10x8)
-
-# if ( sds_verifiy_comp("dset4",COMP_CODE_RLE, 0) == -1)
+
+# if ( sds_verifiy_comp("dset4",COMP_CODE_RLE, 0) == -1)
# goto out;
-# if ( sds_verifiy_chunk("dset4",HDF_CHUNK|HDF_COMP,2,in_chunk_lengths) == -1)
+# if ( sds_verifiy_chunk("dset4",HDF_CHUNK|HDF_COMP,2,in_chunk_lengths) == -1)
# goto out;
-
+
#-------------------------------------------------------------------------
- # test3:
+ # test3:
# SDS SELECTED with GZIP, chunking SELECTED
#-------------------------------------------------------------------------
#
ADD_H4_TEST(SDSGZIP_CHUNK "TEST" ${HREPACK_FILE1} -t "dset4:GZIP 6" -c dset4:10x8)
-
-# if ( sds_verifiy_comp("dset4",COMP_CODE_DEFLATE, 6) == -1)
+
+# if ( sds_verifiy_comp("dset4",COMP_CODE_DEFLATE, 6) == -1)
# goto out;
-# if ( sds_verifiy_chunk("dset4",HDF_CHUNK|HDF_COMP,2,in_chunk_lengths) == -1)
+# if ( sds_verifiy_chunk("dset4",HDF_CHUNK|HDF_COMP,2,in_chunk_lengths) == -1)
# goto out;
-
-
+
+
#-------------------------------------------------------------------------
- # test4:
+ # test4:
# SDS SELECTED with SZIP, chunking SELECTED
#-------------------------------------------------------------------------
#
@@ -167,31 +167,31 @@
ADD_H4_TEST(SDSSZIP_CHUNK "SKIP" ${HREPACK_FILE1} -c dset4:10x8)
endif (H4_HAVE_SZIP_ENCODER)
#if defined (H4_HAVE_LIBSZ)
-# if (SZ_encoder_enabled())
+# if (SZ_encoder_enabled())
# {
-#
-# if ( sds_verifiy_comp("dset4",COMP_CODE_SZIP, 0) == -1)
+#
+# if ( sds_verifiy_comp("dset4",COMP_CODE_SZIP, 0) == -1)
# goto out;
-# if ( sds_verifiy_chunk("dset4",HDF_CHUNK|HDF_COMP,2,in_chunk_lengths) == -1)
+# if ( sds_verifiy_chunk("dset4",HDF_CHUNK|HDF_COMP,2,in_chunk_lengths) == -1)
# goto out;
-
+
#-------------------------------------------------------------------------
- # test4:
+ # test4:
# SDS SELECTED with NONE, chunking SELECTED NONE
#-------------------------------------------------------------------------
#
ADD_H4_TEST(SDSNONE_CHUNKNONE "TEST" ${HREPACK_FILE1} -t dset_chunk_comp:NONE -t dset_chunk:NONE -c dset_chunk_comp:NONE -c dset_chunk:NONE)
-
-# if ( sds_verifiy_comp("dset_chunk_comp",COMP_CODE_NONE, 0) == -1)
+
+# if ( sds_verifiy_comp("dset_chunk_comp",COMP_CODE_NONE, 0) == -1)
# goto out;
-# if ( sds_verifiy_comp("dset_chunk",COMP_CODE_NONE, 0) == -1)
+# if ( sds_verifiy_comp("dset_chunk",COMP_CODE_NONE, 0) == -1)
# goto out;
-# if ( sds_verifiy_chunk("dset_chunk_comp",HDF_NONE,0,0) == -1)
+# if ( sds_verifiy_chunk("dset_chunk_comp",HDF_NONE,0,0) == -1)
# goto out;
-# if ( sds_verifiy_chunk("dset_chunk",HDF_NONE,0,0) == -1)
+# if ( sds_verifiy_chunk("dset_chunk",HDF_NONE,0,0) == -1)
# goto out;
-
-
+
+
#-------------------------------------------------------------------------
# test5:
# SDS SELECTED with all types, chunking SELECTED
@@ -202,29 +202,29 @@
else (H4_HAVE_SZIP_ENCODER)
ADD_H4_TEST(SDS_CHUNK "TEST" ${HREPACK_FILE1} -t "dset4:GZIP 9" -t dset5:RLE -t "dset6:HUFF 2" -c dset4:10x8 -c dset5:10x8 -c dset6:10x8)
endif (H4_HAVE_SZIP_ENCODER)
-
-# if ( sds_verifiy_comp("dset4",COMP_CODE_DEFLATE, 9) == -1)
+
+# if ( sds_verifiy_comp("dset4",COMP_CODE_DEFLATE, 9) == -1)
# goto out;
-# if ( sds_verifiy_comp("dset5",COMP_CODE_RLE, 0) == -1)
+# if ( sds_verifiy_comp("dset5",COMP_CODE_RLE, 0) == -1)
# goto out;
-# if ( sds_verifiy_comp("dset6",COMP_CODE_SKPHUFF, 2) == -1)
+# if ( sds_verifiy_comp("dset6",COMP_CODE_SKPHUFF, 2) == -1)
# goto out;
#if defined (H4_HAVE_LIBSZ)
# if (SZ_encoder_enabled()) {
-# if ( sds_verifiy_comp("dset7",COMP_CODE_SZIP, 0) == -1)
+# if ( sds_verifiy_comp("dset7",COMP_CODE_SZIP, 0) == -1)
# goto out;
# }
#endif
-# if ( sds_verifiy_chunk("dset4",HDF_CHUNK|HDF_COMP,2,in_chunk_lengths) == -1)
+# if ( sds_verifiy_chunk("dset4",HDF_CHUNK|HDF_COMP,2,in_chunk_lengths) == -1)
# goto out;
-# if ( sds_verifiy_chunk("dset5",HDF_CHUNK|HDF_COMP,2,in_chunk_lengths) == -1)
+# if ( sds_verifiy_chunk("dset5",HDF_CHUNK|HDF_COMP,2,in_chunk_lengths) == -1)
# goto out;
-# if ( sds_verifiy_chunk("dset6",HDF_CHUNK|HDF_COMP,2,in_chunk_lengths) == -1)
+# if ( sds_verifiy_chunk("dset6",HDF_CHUNK|HDF_COMP,2,in_chunk_lengths) == -1)
# goto out;
-
-
+
+
#-------------------------------------------------------------------------
- # test6:
+ # test6:
# SDS SELECTED with all types, no chunking
#-------------------------------------------------------------------------
#
@@ -233,71 +233,70 @@
else (H4_HAVE_SZIP_ENCODER)
ADD_H4_TEST(SEL_NOCHUNK "TEST" ${HREPACK_FILE1} -t "dset4:GZIP 9" -t dset5:RLE -t "dset6:HUFF 2")
endif (H4_HAVE_SZIP_ENCODER)
-
-# if ( sds_verifiy_comp("dset4",COMP_CODE_DEFLATE, 9) == -1)
+
+# if ( sds_verifiy_comp("dset4",COMP_CODE_DEFLATE, 9) == -1)
# goto out;
-# if ( sds_verifiy_comp("dset5",COMP_CODE_RLE, 0) == -1)
+# if ( sds_verifiy_comp("dset5",COMP_CODE_RLE, 0) == -1)
# goto out;
-# if ( sds_verifiy_comp("dset6",COMP_CODE_SKPHUFF, 2) == -1)
+# if ( sds_verifiy_comp("dset6",COMP_CODE_SKPHUFF, 2) == -1)
# goto out;
#if defined (H4_HAVE_LIBSZ)
# if (SZ_encoder_enabled()) {
-# if ( sds_verifiy_comp("dset7",COMP_CODE_SZIP, 0) == -1)
+# if ( sds_verifiy_comp("dset7",COMP_CODE_SZIP, 0) == -1)
# goto out;
# }
#endif
-
-
+
+
#-------------------------------------------------------------------------
- # test7:
+ # test7:
# compressing SDS ALL, chunking SELECTED NONE
#-------------------------------------------------------------------------
#
ADD_H4_TEST(COMPALL_CHUNKNONE "TEST" ${HREPACK_FILE1} -t "*:GZIP 1" -c dset_chunk_comp:NONE -c dset_chunk:NONE)
-
-# if ( sds_verifiy_comp_all(COMP_CODE_DEFLATE, 1) == -1)
+
+# if ( sds_verifiy_comp_all(COMP_CODE_DEFLATE, 1) == -1)
# goto out;
-# if ( sds_verifiy_chunk("dset_chunk_comp",HDF_NONE,0,0) == -1)
+# if ( sds_verifiy_chunk("dset_chunk_comp",HDF_NONE,0,0) == -1)
# goto out;
-# if ( sds_verifiy_chunk("dset_chunk",HDF_NONE,0,0) == -1)
+# if ( sds_verifiy_chunk("dset_chunk",HDF_NONE,0,0) == -1)
# goto out;
-
+
#-------------------------------------------------------------------------
# test8:
# no compressing, chunking ALL
#-------------------------------------------------------------------------
#
ADD_H4_TEST(NOCOMP_CHUNKALL "TEST" ${HREPACK_FILE1} -c *:10x8)
-
-# if ( sds_verifiy_chunk_all(HDF_CHUNK,2,in_chunk_lengths,"dset7") == -1)
+
+# if ( sds_verifiy_chunk_all(HDF_CHUNK,2,in_chunk_lengths,"dset7") == -1)
# goto out;
-
-
+
+
#-------------------------------------------------------------------------
- # test9:
+ # test9:
# compressing SDS ALL with GZIP
#-------------------------------------------------------------------------
#
ADD_H4_TEST(GZIP "TEST" ${HREPACK_FILE1} -t "*:GZIP 1")
-
-# if ( sds_verifiy_comp_all(COMP_CODE_DEFLATE, 1) == -1)
+
+# if ( sds_verifiy_comp_all(COMP_CODE_DEFLATE, 1) == -1)
# goto out;
-
-
+
+
#-------------------------------------------------------------------------
- # test10:
+ # test10:
# repack a big file using hyperslab reading/writing
#-------------------------------------------------------------------------
#
ADD_H4_TEST(HYPERSLAB "TEST" ${HREPACK_FILE2})
-
+
#-------------------------------------------------------------------------
- # test11:
+ # test11:
# repack a file with vgroups
#-------------------------------------------------------------------------
#
ADD_H4_TEST(VGROUP "TEST" ${HREPACK_FILE3})
-
+
# if (vg_verifygrpdep(HREPACK_FILE3,HREPACK_FILE3_OUT) != 0 )
# goto out;
-
\ No newline at end of file
diff --git a/mfhdf/hrepack/Makefile.in b/mfhdf/hrepack/Makefile.in
index d174cca..a7c117b 100644
--- a/mfhdf/hrepack/Makefile.in
+++ b/mfhdf/hrepack/Makefile.in
@@ -91,7 +91,20 @@ check_PROGRAMS = hrepack_check$(EXEEXT) test_hrepack$(EXEEXT)
TESTS = $(am__EXEEXT_1) $(TEST_SCRIPT)
subdir = mfhdf/hrepack
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
-am__aclocal_m4_deps = $(top_srcdir)/configure.ac
+am__aclocal_m4_deps = $(top_srcdir)/m4/ax_check_class.m4 \
+ $(top_srcdir)/m4/ax_check_classpath.m4 \
+ $(top_srcdir)/m4/ax_check_java_home.m4 \
+ $(top_srcdir)/m4/ax_check_junit.m4 \
+ $(top_srcdir)/m4/ax_java_options.m4 \
+ $(top_srcdir)/m4/ax_jni_include_dir.m4 \
+ $(top_srcdir)/m4/ax_prog_jar.m4 \
+ $(top_srcdir)/m4/ax_prog_java.m4 \
+ $(top_srcdir)/m4/ax_prog_java_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javac.m4 \
+ $(top_srcdir)/m4/ax_prog_javac_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javadoc.m4 \
+ $(top_srcdir)/m4/ax_try_compile_java.m4 \
+ $(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
mkinstalldirs = $(SHELL) $(top_srcdir)/bin/mkinstalldirs
@@ -427,12 +440,27 @@ FFLAGS = @FFLAGS@
FGREP = @FGREP@
FLIBS = @FLIBS@
GREP = @GREP@
+H4_CLASSPATH = @H4_CLASSPATH@
+H4_JAVACFLAGS = @H4_JAVACFLAGS@
+H4_JAVAFLAGS = @H4_JAVAFLAGS@
H4_VERSION = @H4_VERSION@
+HDF_JAVA = @HDF_JAVA@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
INSTALL_PROGRAM = @INSTALL_PROGRAM@
INSTALL_SCRIPT = @INSTALL_SCRIPT@
INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+JAR = @JAR@
+JAVA = @JAVA@
+JAVAC = @JAVAC@
+JAVACFLAGS = @JAVACFLAGS@
+JAVADOC = @JAVADOC@
+JAVAFLAGS = @JAVAFLAGS@
+JAVAPREFIX = @JAVAPREFIX@
+JAVA_JUNIT = @JAVA_JUNIT@
+JAVA_PATH_NAME = @JAVA_PATH_NAME@
+JNIFLAGS = @JNIFLAGS@
+JUNIT = @JUNIT@
LD = @LD@
LDFLAGS = @LDFLAGS@
LEX = @LEX@
@@ -475,11 +503,14 @@ STRIP = @STRIP@
SZIP_HAS_ENCODER = @SZIP_HAS_ENCODER@
SZIP_INFO = @SZIP_INFO@
TBL = @TBL@
+TESTS_JUNIT = @TESTS_JUNIT@
TEST_FORTRAN_NETCDF = @TEST_FORTRAN_NETCDF@
UNAME_INFO = @UNAME_INFO@
USE_COMP_SZIP = @USE_COMP_SZIP@
+UUDECODE = @UUDECODE@
VERSION = @VERSION@
YACC = @YACC@
+_ACJNI_JAVAC = @_ACJNI_JAVAC@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/mfhdf/hrepack/hrepack.c b/mfhdf/hrepack/hrepack.c
index 0726ee4..7295210 100644
--- a/mfhdf/hrepack/hrepack.c
+++ b/mfhdf/hrepack/hrepack.c
@@ -98,7 +98,7 @@ int hrepack_addcomp(const char* str,
}
/* initialize parse struct to FAIL */
- memset(&comp,FAIL,sizeof(comp_info_t));
+ HDmemset(&comp,FAIL,sizeof(comp_info_t));
/* parse the -t option */
if ((obj_list = parse_comp(str,&n_objs,&comp)) == NULL)
@@ -107,7 +107,7 @@ int hrepack_addcomp(const char* str,
/* searh for the "*" all objects character */
for (i = 0; i < n_objs; i++)
{
- if (strcmp("*",obj_list[i].obj)==0)
+ if (HDstrcmp("*",obj_list[i].obj)==0)
{
/* if we are compressing all set the global comp type */
options->all_comp=1;
@@ -179,7 +179,7 @@ int hrepack_addchunk(const char* str,
/* searh for the "*" all objects character */
for (i = 0; i < n_objs; i++)
{
- if (strcmp("*",obj_list[i].obj)==0)
+ if (HDstrcmp("*",obj_list[i].obj)==0)
{
/* if we are chunking all set the global chunking type */
options->all_chunk=1;
@@ -225,7 +225,7 @@ out:
void hrepack_init (options_t *options,
int verbose)
{
- memset(options,0,sizeof(options_t));
+ HDmemset(options,0,sizeof(options_t));
options->threshold = 1024;
options->verbose = verbose;
options_table_init(&(options->op_tbl));
@@ -397,7 +397,7 @@ int read_info(const char *filename,
* comp
*-------------------------------------------------------------------------
*/
- if (strcmp(stype,"-t") == 0) {
+ if (HDstrcmp(stype,"-t") == 0) {
/* find begining of info */
i=0; c='0';
@@ -422,7 +422,7 @@ int read_info(const char *filename,
* chunk
*-------------------------------------------------------------------------
*/
- else if (strcmp(stype,"-c") == 0) {
+ else if (HDstrcmp(stype,"-c") == 0) {
/* find begining of info */
i=0; c='0';
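
For context on the -t/-c handling touched above: when the parsed object list contains the wildcard "*", hrepack flips a global flag (all_comp/all_chunk) instead of recording per-object entries. The sketch below shows that dispatch shape with invented names; it is not the hrepack code.

    /* Sketch (invented names): a "*" entry in an object list switches the
     * tool from per-object settings to a global setting, as
     * hrepack_addcomp()/hrepack_addchunk() do above. */
    #include <stdio.h>
    #include <string.h>

    struct opts { int all_comp; };

    static void apply_comp_list(struct opts *o, const char *objs[], int n)
    {
        int i;
        for (i = 0; i < n; i++) {
            if (strcmp("*", objs[i]) == 0) {
                o->all_comp = 1;          /* compress everything */
                return;
            }
        }
        /* otherwise fall through to per-object handling (omitted) */
    }

    int main(void)
    {
        const char *list[] = { "dset4", "*" };
        struct opts o = { 0 };
        apply_comp_list(&o, list, 2);
        printf("all_comp=%d\n", o.all_comp);
        return 0;
    }
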
diff --git a/mfhdf/hrepack/hrepack_an.c b/mfhdf/hrepack/hrepack_an.c
index 9a8fb95..767932f 100644
--- a/mfhdf/hrepack/hrepack_an.c
+++ b/mfhdf/hrepack/hrepack_an.c
@@ -204,19 +204,19 @@ int copy_an_data(int32 infile_id,
*/
if (is_label)
ann_length++;
-
- if ((buf = (char *)malloc((ann_length)*sizeof(int8)))==NULL ) {
+
+ if ((buf = (char *)HDmalloc((ann_length)*sizeof(int8)))==NULL ) {
printf( "Failed to get memory for AN %d of <%s>\n", i, path);
continue;
}
if(ANreadann(ann_id,buf,ann_length)==FAIL){
printf( "Failed to read AN %d of <%s>\n", i, path);
- if (buf) free(buf);
+ if (buf) HDfree(buf);
continue;
}
if(ANendaccess(ann_id)==FAIL){
printf( "Failed to end AN %d of <%s>\n", i, path);
- if (buf) free(buf);
+ if (buf) HDfree(buf);
continue;
}
/*-------------------------------------------------------------------------
@@ -234,10 +234,10 @@ int copy_an_data(int32 infile_id,
}
if(ANendaccess(ann_out)==FAIL){
printf( "Failed to end AN %d of <%s>\n", i, path);
- if (buf) free(buf);
+ if (buf) HDfree(buf);
continue;
}
- if (buf) free(buf);
+ if (buf) HDfree(buf);
}
/* Terminate access to the AN interface */
diff --git a/mfhdf/hrepack/hrepack_dim.c b/mfhdf/hrepack/hrepack_dim.c
index 048139f..aca83d0 100644
--- a/mfhdf/hrepack/hrepack_dim.c
+++ b/mfhdf/hrepack/hrepack_dim.c
@@ -724,8 +724,8 @@ out:
}
if (buf)
- free(buf);
-
+ HDfree(buf);
+
return ret;
}
@@ -769,8 +769,8 @@ static void match_dim_table_add (match_dim_table_t *mdim_tbl,
if (mdim_tbl->nobjs == mdim_tbl->size) {
mdim_tbl->size *= 2;
- mdim_tbl->objs = (match_dim_name_t*)realloc(mdim_tbl->objs, mdim_tbl->size * sizeof(match_dim_name_t));
-
+ mdim_tbl->objs = (match_dim_name_t*)HDrealloc(mdim_tbl->objs, mdim_tbl->size * sizeof(match_dim_name_t));
+
for (i = mdim_tbl->nobjs; i < mdim_tbl->size; i++) {
mdim_tbl->objs[i].ref = -1;
mdim_tbl->objs[i].flags[0] = mdim_tbl->objs[i].flags[1] = -1;
@@ -779,7 +779,7 @@ static void match_dim_table_add (match_dim_table_t *mdim_tbl,
i = mdim_tbl->nobjs++;
mdim_tbl->objs[i].ref = ref;
- strcpy(mdim_tbl->objs[i].dim_name,dim_name);
+ HDstrcpy(mdim_tbl->objs[i].dim_name,dim_name);
mdim_tbl->objs[i].flags[0] = flags[0];
mdim_tbl->objs[i].flags[1] = flags[1];
}
@@ -802,12 +802,12 @@ static void match_dim_table_add (match_dim_table_t *mdim_tbl,
static void match_dim_table_init( match_dim_table_t **tbl )
{
int i;
- match_dim_table_t *mdim_tbl = (match_dim_table_t*) malloc(sizeof(match_dim_table_t));
-
+ match_dim_table_t *mdim_tbl = (match_dim_table_t*)HDmalloc(sizeof(match_dim_table_t));
+
mdim_tbl->size = 20;
mdim_tbl->nobjs = 0;
- mdim_tbl->objs = (match_dim_name_t*) malloc(mdim_tbl->size * sizeof(match_dim_name_t));
-
+ mdim_tbl->objs = (match_dim_name_t*)HDmalloc(mdim_tbl->size * sizeof(match_dim_name_t));
+
for (i = 0; i < mdim_tbl->size; i++) {
mdim_tbl->objs[i].ref = -1;
mdim_tbl->objs[i].flags[0] = mdim_tbl->objs[i].flags[1] = -1;
@@ -834,8 +834,8 @@ static void match_dim_table_init( match_dim_table_t **tbl )
static void match_dim_table_free( match_dim_table_t *mdim_tbl )
{
- free(mdim_tbl->objs);
- free(mdim_tbl);
+ HDfree(mdim_tbl->objs);
+ HDfree(mdim_tbl);
}
@@ -860,8 +860,8 @@ void dim_table_add(dim_table_t *dim_tbl, int ref, char* name)
if (dim_tbl->nobjs == dim_tbl->size) {
dim_tbl->size *= 2;
- dim_tbl->objs = (dim_name_t*)realloc(dim_tbl->objs, dim_tbl->size * sizeof(dim_name_t));
-
+ dim_tbl->objs = (dim_name_t*)HDrealloc(dim_tbl->objs, dim_tbl->size * sizeof(dim_name_t));
+
for (i = dim_tbl->nobjs; i < dim_tbl->size; i++) {
dim_tbl->objs[i].ref = -1;
}
@@ -869,7 +869,7 @@ void dim_table_add(dim_table_t *dim_tbl, int ref, char* name)
i = dim_tbl->nobjs++;
dim_tbl->objs[i].ref = ref;
- strcpy(dim_tbl->objs[i].dim_name,name);
+ HDstrcpy(dim_tbl->objs[i].dim_name,name);
}
@@ -891,12 +891,12 @@ void dim_table_add(dim_table_t *dim_tbl, int ref, char* name)
void dim_table_init( dim_table_t **tbl )
{
int i;
- dim_table_t* dim_tbl = (dim_table_t*) malloc(sizeof(dim_table_t));
-
+ dim_table_t* dim_tbl = (dim_table_t*)HDmalloc(sizeof(dim_table_t));
+
dim_tbl->size = 20;
dim_tbl->nobjs = 0;
- dim_tbl->objs = (dim_name_t*) malloc(dim_tbl->size * sizeof(dim_name_t));
-
+ dim_tbl->objs = (dim_name_t*)HDmalloc(dim_tbl->size * sizeof(dim_name_t));
+
for (i = 0; i < dim_tbl->size; i++) {
dim_tbl->objs[i].ref = -1;
}
@@ -920,7 +920,7 @@ void dim_table_init( dim_table_t **tbl )
void dim_table_free( dim_table_t *dim_tbl )
{
- free(dim_tbl->objs);
- free(dim_tbl);
+ HDfree(dim_tbl->objs);
+ HDfree(dim_tbl);
}
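
The dimension tables edited above grow by doubling: when nobjs reaches size, the capacity is doubled with a realloc and the new entry is appended. A small self-contained sketch of that pattern follows; the entry type and names are illustrative, and the sketch adds an allocation check for safety that the snippet above omits.

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    typedef struct { int ref; char name[64]; } entry_t;
    typedef struct { int size, nobjs; entry_t *objs; } table_t;

    static void table_add(table_t *t, int ref, const char *name)
    {
        if (t->nobjs == t->size) {              /* table full: double it */
            entry_t *grown = (entry_t *)realloc(t->objs,
                                                2 * t->size * sizeof(entry_t));
            if (grown == NULL)
                exit(1);                        /* sketch: just bail out */
            t->objs = grown;
            t->size *= 2;
        }
        t->objs[t->nobjs].ref = ref;
        strncpy(t->objs[t->nobjs].name, name,
                sizeof(t->objs[t->nobjs].name) - 1);
        t->objs[t->nobjs].name[sizeof(t->objs[t->nobjs].name) - 1] = '\0';
        t->nobjs++;
    }

    int main(void)
    {
        table_t t;
        int i;

        t.size = 2;
        t.nobjs = 0;
        t.objs = (entry_t *)malloc(t.size * sizeof(entry_t));
        if (t.objs == NULL)
            return 1;
        for (i = 0; i < 10; i++)
            table_add(&t, i, "dim");
        printf("capacity=%d entries=%d\n", t.size, t.nobjs);   /* 16 and 10 */
        free(t.objs);
        return 0;
    }
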
diff --git a/mfhdf/hrepack/hrepack_gr.c b/mfhdf/hrepack/hrepack_gr.c
index bf46ae4..148102d 100644
--- a/mfhdf/hrepack/hrepack_gr.c
+++ b/mfhdf/hrepack/hrepack_gr.c
@@ -366,7 +366,7 @@ int copy_gr(int32 infile_id,
/* check inspection mode */
if ( options->trip==0 ) {
- if (path) free(path);
+ if (path) HDfree(path);
if (GRendaccess(ri_id)==FAIL){
printf( "Could not close GR <%s>\n",path);
return-1;
@@ -382,7 +382,7 @@ int copy_gr(int32 infile_id,
if ((buf = (VOIDP) HDmalloc(data_size)) == NULL) {
printf( "Failed to allocate %ld elements of size %ld\n", nelms, eltsz);
GRendaccess(ri_id);
- if (path) free(path);
+ if (path) HDfree(path);
return-1;
}
@@ -391,7 +391,7 @@ int copy_gr(int32 infile_id,
if ( GRreqimageil(ri_id, interlace_mode) == FAIL ){
printf( "Could not set interlace for GR <%s>\n", path);
GRendaccess(ri_id);
- if (path) free(path);
+ if (path) HDfree(path);
return-1;
}
@@ -399,7 +399,7 @@ int copy_gr(int32 infile_id,
if (GRreadimage (ri_id, start, NULL, edges, buf) == FAIL) {
printf( "Could not read GR <%s>\n", path);
GRendaccess(ri_id);
- if (path) free(path);
+ if (path) HDfree(path);
return-1;
}
@@ -592,9 +592,9 @@ out:
printf( "Failed to close SDS <%s>\n", path);
if (path)
- free(path);
+ HDfree(path);
if (buf)
- free(buf);
+ HDfree(buf);
return ret;
@@ -655,7 +655,7 @@ int copy_gr_attrs(int32 ri_id,
}
if (attr_buf)
- free(attr_buf);
+ HDfree(attr_buf);
}
return 1;
diff --git a/mfhdf/hrepack/hrepack_list.c b/mfhdf/hrepack/hrepack_list.c
index 6b23b81..c807cae 100644
--- a/mfhdf/hrepack/hrepack_list.c
+++ b/mfhdf/hrepack/hrepack_list.c
@@ -401,7 +401,7 @@ int list_vg(int32 infile_id,
* use the nlones returned to allocate sufficient space for the
* buffer ref_array to hold the reference numbers of all lone vgroups,
*/
- ref_array = (int32 *) malloc(sizeof(int32) * nlones);
+ ref_array = (int32 *)HDmalloc(sizeof(int32) * nlones);
/*
* and call Vlone again to retrieve the reference numbers into
@@ -470,7 +470,7 @@ int list_vg(int32 infile_id,
}
continue;
}
- if(strcmp(vg_name,GR_NAME)==0)
+ if(HDstrcmp(vg_name,GR_NAME)==0)
{
if (Vdetach (vg_id)==FAIL)
{
@@ -532,8 +532,8 @@ int list_vg(int32 infile_id,
ntagrefs = Vntagrefs(vg_id);
if ( ntagrefs > 0 )
{
- tags = (int32 *) malloc(sizeof(int32) * ntagrefs);
- refs = (int32 *) malloc(sizeof(int32) * ntagrefs);
+ tags = (int32 *)HDmalloc(sizeof(int32) * ntagrefs);
+ refs = (int32 *)HDmalloc(sizeof(int32) * ntagrefs);
if (Vgettagrefs(vg_id, tags, refs, ntagrefs)<0)
goto out;
@@ -555,9 +555,9 @@ int list_vg(int32 infile_id,
goto out;
}
- free (tags);
+ HDfree(tags);
tags=NULL;
- free (refs);
+ HDfree(refs);
refs=NULL;
}
@@ -575,14 +575,14 @@ int list_vg(int32 infile_id,
}
}
- free (vg_name);
+ HDfree(vg_name);
} /* for nlones */
/* free the space allocated */
if (ref_array!=NULL)
- free (ref_array);
+ HDfree(ref_array);
} /* if nlones */
@@ -616,11 +616,11 @@ out:
/* free the space allocated */
if (ref_array!=NULL)
- free (ref_array);
+ HDfree(ref_array);
if (tags!=NULL)
- free (tags);
+ HDfree(tags);
if (refs!=NULL)
- free (refs);
+ HDfree(refs);
return FAIL;
@@ -727,7 +727,7 @@ int vgroup_insert(int32 infile_id,
}
continue;
}
- if(strcmp(vg_name,GR_NAME)==0)
+ if(HDstrcmp(vg_name,GR_NAME)==0)
{
if (Vdetach (vg_id)==FAIL)
{
@@ -824,8 +824,8 @@ int vgroup_insert(int32 infile_id,
ntagrefs = Vntagrefs(vg_id);
if ( ntagrefs > 0 )
{
- tags = (int32 *) malloc(sizeof(int32) * ntagrefs);
- refs = (int32 *) malloc(sizeof(int32) * ntagrefs);
+ tags = (int32 *)HDmalloc(sizeof(int32) * ntagrefs);
+ refs = (int32 *)HDmalloc(sizeof(int32) * ntagrefs);
if (Vgettagrefs(vg_id, tags, refs, ntagrefs)<0)
goto out;
/* recurse */
@@ -847,15 +847,15 @@ int vgroup_insert(int32 infile_id,
options)<0) {
goto out;
}
- free (tags);
+ HDfree(tags);
tags=NULL;
- free (refs);
+ HDfree(refs);
refs=NULL;
} /* ntagrefs > 0 */
if (path)
- free(path);
+ HDfree(path);
} /* check if already visited */
@@ -952,9 +952,9 @@ int vgroup_insert(int32 infile_id,
out:
if (tags!=NULL)
- free (tags);
+ HDfree(tags);
if (refs!=NULL)
- free (refs);
+ HDfree(refs);
return FAIL;
}
@@ -1160,7 +1160,7 @@ int list_vs(int32 infile_id,
* use the nlones returned to allocate sufficient space for the
* buffer ref_array to hold the reference numbers of all lone vgroups,
*/
- ref_array = (int32 *) malloc(sizeof(int32) * nlones);
+ ref_array = (int32 *)HDmalloc(sizeof(int32) * nlones);
/*
* and call VSlone again to retrieve the reference numbers into
@@ -1198,7 +1198,7 @@ int list_vs(int32 infile_id,
/* free the space allocated */
if (ref_array)
{
- free (ref_array);
+ HDfree(ref_array);
ref_array = NULL;
}
} /* if */
@@ -1232,7 +1232,7 @@ out:
/* free the space allocated */
if (ref_array!=NULL)
- free (ref_array);
+ HDfree(ref_array);
return FAIL;
}
@@ -1358,7 +1358,7 @@ int list_an(int32 infile_id,
ann_length = ANannlen (ann_id);
/* Allocate space for the buffer to hold the data label text */
- ann_buf = malloc ((ann_length+1) * sizeof (char));
+ ann_buf = HDmalloc((ann_length+1) * sizeof (char));
/*
* Read and display the file label. Note that the size of the buffer,
@@ -1394,7 +1394,7 @@ int list_an(int32 infile_id,
/* Free the space allocated for the annotation buffer */
if (ann_buf)
- free (ann_buf);
+ HDfree(ann_buf);
}
/*-------------------------------------------------------------------------
@@ -1411,7 +1411,7 @@ int list_an(int32 infile_id,
ann_length = ANannlen (ann_id);
/* Allocate space for the buffer to hold the data label text */
- ann_buf = malloc ((ann_length+1) * sizeof (char));
+ ann_buf = HDmalloc((ann_length+1) * sizeof (char));
if (ANreadann (ann_id, ann_buf, ann_length+1)==FAIL)
{
@@ -1439,7 +1439,7 @@ int list_an(int32 infile_id,
/* Free the space allocated for the annotation buffer */
if (ann_buf)
{
- free (ann_buf);
+ HDfree(ann_buf);
ann_buf = NULL;
}
}
@@ -1460,7 +1460,7 @@ out:
printf( "Could not end AN\n");
}
if (ann_buf!=NULL)
- free (ann_buf);
+ HDfree(ann_buf);
return FAIL;
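
list_vg() and list_vs() above use the usual two-pass lone-object idiom: ask Vlone()/VSlone() for the count first, allocate ref_array to that size, then call again to fill it. The sketch below shows the shape with a stand-in function in place of the real V* API, since only the calling pattern matters here.

    #include <stdio.h>
    #include <stdlib.h>

    /* stand-in for Vlone(): with out==NULL it only reports the count */
    static int count_or_fetch(int *out, int max)
    {
        static const int refs[] = { 2, 3, 5 };
        int n = (int)(sizeof(refs) / sizeof(refs[0])), i;
        if (out != NULL)
            for (i = 0; i < n && i < max; i++)
                out[i] = refs[i];
        return n;
    }

    int main(void)
    {
        int nlones = count_or_fetch(NULL, 0);          /* pass 1: count only */
        int *ref_array = (int *)malloc(sizeof(int) * nlones);
        int i;

        if (ref_array == NULL)
            return 1;
        count_or_fetch(ref_array, nlones);             /* pass 2: fetch refs */
        for (i = 0; i < nlones; i++)
            printf("lone ref %d\n", ref_array[i]);
        free(ref_array);
        return 0;
    }
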
diff --git a/mfhdf/hrepack/hrepack_lsttable.c b/mfhdf/hrepack/hrepack_lsttable.c
index 5dcde48..4391b1e 100644
--- a/mfhdf/hrepack/hrepack_lsttable.c
+++ b/mfhdf/hrepack/hrepack_lsttable.c
@@ -68,7 +68,7 @@ void list_table_add(list_table_t *list_tbl, int tag, int ref, char* path)
if (list_tbl->nobjs == list_tbl->size)
{
list_tbl->size *= 2;
- list_tbl->objs = (obj_info_t*)realloc(list_tbl->objs, list_tbl->size * sizeof(obj_info_t));
+ list_tbl->objs = (obj_info_t*)HDrealloc(list_tbl->objs, list_tbl->size * sizeof(obj_info_t));
for (i = list_tbl->nobjs; i < list_tbl->size; i++)
{
@@ -107,11 +107,11 @@ void list_table_add(list_table_t *list_tbl, int tag, int ref, char* path)
void list_table_init( list_table_t **tbl )
{
int i;
- list_table_t* list_tbl = (list_table_t*) malloc(sizeof(list_table_t));
+ list_table_t* list_tbl = (list_table_t*)HDmalloc(sizeof(list_table_t));
list_tbl->size = 20;
list_tbl->nobjs = 0;
- list_tbl->objs = (obj_info_t*) malloc(list_tbl->size * sizeof(obj_info_t));
+ list_tbl->objs = (obj_info_t*)HDmalloc(list_tbl->size * sizeof(obj_info_t));
for (i = 0; i < list_tbl->size; i++)
{
@@ -144,10 +144,10 @@ void list_table_free( list_table_t *list_tbl )
for (i = 0; i < list_tbl->nobjs; i++)
{
assert(list_tbl->objs[i].path);
- free(list_tbl->objs[i].path);
+ HDfree(list_tbl->objs[i].path);
}
- free(list_tbl->objs);
- free(list_tbl);
+ HDfree(list_tbl->objs);
+ HDfree(list_tbl);
}
@@ -172,7 +172,7 @@ const char* list_table_check(list_table_t *list_tbl, char*obj_name)
for (i = 0; i < list_tbl->nobjs; i++)
{
- if (strcmp(list_tbl->objs[i].path,obj_name)==0)
+ if (HDstrcmp(list_tbl->objs[i].path,obj_name)==0)
{
/* found the name; check if it is an SDS or Image */
tag=list_tbl->objs[i].tag;
diff --git a/mfhdf/hrepack/hrepack_opttable.c b/mfhdf/hrepack/hrepack_opttable.c
index 6b2c226..946ad32 100644
--- a/mfhdf/hrepack/hrepack_opttable.c
+++ b/mfhdf/hrepack/hrepack_opttable.c
@@ -32,14 +32,14 @@
void options_table_init( options_table_t **tbl )
{
int i;
- options_table_t* op_tbl = (options_table_t*) malloc(sizeof(options_table_t));
+ options_table_t* op_tbl = (options_table_t*)HDmalloc(sizeof(options_table_t));
op_tbl->size = 3;
op_tbl->nelems = 0;
- op_tbl->objs = (pack_info_t*) malloc(op_tbl->size * sizeof(pack_info_t));
+ op_tbl->objs = (pack_info_t*)HDmalloc(op_tbl->size * sizeof(pack_info_t));
for (i = 0; i < op_tbl->size; i++) {
- strcpy(op_tbl->objs[i].objpath,"\0");
+ HDstrcpy(op_tbl->objs[i].objpath,"\0");
op_tbl->objs[i].comp.info = -1;
op_tbl->objs[i].comp.type = COMP_CODE_NONE;
op_tbl->objs[i].chunk.rank = -1;
@@ -61,9 +61,9 @@ void options_table_init( options_table_t **tbl )
void options_table_free( options_table_t *op_tbl )
{
if (op_tbl->objs!=NULL)
- free(op_tbl->objs);
+ HDfree(op_tbl->objs);
if (op_tbl!=NULL)
- free(op_tbl);
+ HDfree(op_tbl);
}
/*-------------------------------------------------------------------------
@@ -86,9 +86,9 @@ int options_add_chunk(obj_list_t *obj_list,
if (op_tbl->nelems+n_objs >= op_tbl->size) {
op_tbl->size += n_objs;
- op_tbl->objs = (pack_info_t*)realloc(op_tbl->objs, op_tbl->size * sizeof(pack_info_t));
+ op_tbl->objs = (pack_info_t*)HDrealloc(op_tbl->objs, op_tbl->size * sizeof(pack_info_t));
for (i = op_tbl->nelems; i < op_tbl->size; i++) {
- strcpy(op_tbl->objs[i].objpath,"\0");
+ HDstrcpy(op_tbl->objs[i].objpath,"\0");
op_tbl->objs[i].comp.info = -1;
op_tbl->objs[i].comp.type = COMP_CODE_NONE;
op_tbl->objs[i].chunk.rank = -1;
@@ -105,7 +105,7 @@ int options_add_chunk(obj_list_t *obj_list,
for (i = 0; i < op_tbl->nelems; i++)
{
/*already on the table */
- if (strcmp(obj_list[j].obj,op_tbl->objs[i].objpath)==0)
+ if (HDstrcmp(obj_list[j].obj,op_tbl->objs[i].objpath)==0)
{
/* already chunk info inserted for this one; exit */
if (op_tbl->objs[i].chunk.rank>0)
@@ -130,7 +130,7 @@ int options_add_chunk(obj_list_t *obj_list,
/* keep the grow in a temp var */
I = op_tbl->nelems + added;
added++;
- strcpy(op_tbl->objs[I].objpath,obj_list[j].obj);
+ HDstrcpy(op_tbl->objs[I].objpath,obj_list[j].obj);
op_tbl->objs[I].chunk.rank = chunk_rank;
for (k = 0; k < chunk_rank; k++)
op_tbl->objs[I].chunk.chunk_lengths[k] = chunk_lengths[k];
@@ -146,7 +146,7 @@ int options_add_chunk(obj_list_t *obj_list,
{
I = op_tbl->nelems + added;
added++;
- strcpy(op_tbl->objs[I].objpath,obj_list[j].obj);
+ HDstrcpy(op_tbl->objs[I].objpath,obj_list[j].obj);
op_tbl->objs[I].chunk.rank = chunk_rank;
for (k = 0; k < chunk_rank; k++)
op_tbl->objs[I].chunk.chunk_lengths[k] = chunk_lengths[k];
@@ -179,9 +179,9 @@ int options_add_comp(obj_list_t *obj_list,
if (op_tbl->nelems+n_objs >= op_tbl->size) {
op_tbl->size += n_objs;
- op_tbl->objs = (pack_info_t*)realloc(op_tbl->objs, op_tbl->size * sizeof(pack_info_t));
+ op_tbl->objs = (pack_info_t*)HDrealloc(op_tbl->objs, op_tbl->size * sizeof(pack_info_t));
for (i = op_tbl->nelems; i < op_tbl->size; i++) {
- strcpy(op_tbl->objs[i].objpath,"\0");
+ HDstrcpy(op_tbl->objs[i].objpath,"\0");
op_tbl->objs[i].comp.info = -1;
op_tbl->objs[i].comp.type = COMP_CODE_NONE;
op_tbl->objs[i].chunk.rank = -1;
@@ -198,7 +198,7 @@ int options_add_comp(obj_list_t *obj_list,
for (i = 0; i < op_tbl->nelems; i++)
{
/*already on the table */
- if (strcmp(obj_list[j].obj,op_tbl->objs[i].objpath)==0)
+ if (HDstrcmp(obj_list[j].obj,op_tbl->objs[i].objpath)==0)
{
/* already COMP info inserted for this one; exit */
if (op_tbl->objs[i].comp.type>0)
@@ -221,7 +221,7 @@ int options_add_comp(obj_list_t *obj_list,
/* keep the grow in a temp var */
I = op_tbl->nelems + added;
added++;
- strcpy(op_tbl->objs[I].objpath,obj_list[j].obj);
+ HDstrcpy(op_tbl->objs[I].objpath,obj_list[j].obj);
op_tbl->objs[I].comp = comp;
}
} /* j */
@@ -235,7 +235,7 @@ int options_add_comp(obj_list_t *obj_list,
{
I = op_tbl->nelems + added;
added++;
- strcpy(op_tbl->objs[I].objpath,obj_list[j].obj);
+ HDstrcpy(op_tbl->objs[I].objpath,obj_list[j].obj);
op_tbl->objs[I].comp = comp;
}
}
@@ -263,7 +263,7 @@ pack_info_t* options_get_object(char *path,
for ( i = 0; i < op_tbl->nelems; i++)
{
/* found it */
- if (strcmp(op_tbl->objs[i].objpath,path)==0)
+ if (HDstrcmp(op_tbl->objs[i].objpath,path)==0)
{
return (&op_tbl->objs[i]);
}
diff --git a/mfhdf/hrepack/hrepack_parse.c b/mfhdf/hrepack/hrepack_parse.c
index 95d87e6..ed6a0e5 100644
--- a/mfhdf/hrepack/hrepack_parse.c
+++ b/mfhdf/hrepack/hrepack_parse.c
@@ -88,8 +88,8 @@ obj_list_t* parse_comp(const char *str,
if ( c==',' || j==end_obj-1)
{
if ( c==',') obj[k]='\0'; else obj[k+1]='\0';
- strcpy(obj_list[n].obj,obj);
- memset(obj,0,sizeof(obj));
+ HDstrcpy(obj_list[n].obj,obj);
+ HDmemset(obj,0,sizeof(obj));
n++;
k=-1;
}
@@ -124,7 +124,7 @@ obj_list_t* parse_comp(const char *str,
SZIP=8,NN
*/
- if (strcmp(scomp,"SZIP")==0)
+ if (HDstrcmp(scomp,"SZIP")==0)
{
l=-1; /* mask index check */
for ( m=0,u=i+1; u<len; u++,m++)
@@ -151,9 +151,9 @@ obj_list_t* parse_comp(const char *str,
smask[l]='\0';
i=len-1; /* end */
(*n_objs)--; /* we counted an extra ',' */
- if (strcmp(smask,"NN")==0)
+ if (HDstrcmp(smask,"NN")==0)
comp->szip_mode=NN_MODE;
- else if (strcmp(smask,"EC")==0)
+ else if (HDstrcmp(smask,"EC")==0)
comp->szip_mode=EC_MODE;
else
{
@@ -384,8 +384,8 @@ obj_list_t* parse_chunk(const char *str,
if ( c==',' || j==end_obj-1)
{
if ( c==',') obj[k]='\0'; else obj[k+1]='\0';
- strcpy(obj_list[n].obj,obj);
- memset(obj,0,sizeof(obj));
+ HDstrcpy(obj_list[n].obj,obj);
+ HDmemset(obj,0,sizeof(obj));
n++;
k=-1;
}
@@ -426,7 +426,7 @@ obj_list_t* parse_chunk(const char *str,
else if (i==len-1) { /*no more parameters */
sdim[k]='\0';
k=0;
- if (strcmp(sdim,"NONE")==0)
+ if (HDstrcmp(sdim,"NONE")==0)
{
*chunk_rank=-2;
}
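
parse_comp() and parse_chunk() above walk the -t/-c argument character by character, cutting the comma-separated object names out of the string before the compression or chunking spec. A much-simplified tokenizer sketch, not the hrepack parser:

    #include <stdio.h>
    #include <string.h>

    int main(void)
    {
        char str[] = "dset4,dset5,dset6";     /* object part of a -t argument */
        char *tok = strtok(str, ",");

        while (tok != NULL) {
            printf("object: %s\n", tok);      /* each name would go into obj_list */
            tok = strtok(NULL, ",");
        }
        return 0;
    }
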
diff --git a/mfhdf/hrepack/hrepack_sds.c b/mfhdf/hrepack/hrepack_sds.c
index 4d95204..00a6c15 100644
--- a/mfhdf/hrepack/hrepack_sds.c
+++ b/mfhdf/hrepack/hrepack_sds.c
@@ -429,7 +429,7 @@ int copy_sds(int32 sd_in,
- if (path) free(path);
+ if (path) HDfree(path);
return SUCCEED;
}
@@ -650,7 +650,7 @@ int copy_sds(int32 sd_in,
sm_nelmts = sm_nbytes / p_type_nbytes;
/* the stripmine loop */
- memset(hs_offset, 0, sizeof hs_offset);
+ HDmemset(hs_offset, 0, sizeof hs_offset);
for (elmtno = 0; elmtno < p_nelmts; elmtno += hs_nelmts)
{
@@ -702,7 +702,7 @@ int copy_sds(int32 sd_in,
/* free */
if (sm_buf!=NULL)
{
- free(sm_buf);
+ HDfree(sm_buf);
sm_buf=NULL;
}
@@ -786,7 +786,7 @@ int copy_sds(int32 sd_in,
goto out;
}
}
- free(dim_buf);
+ HDfree(dim_buf);
}
}
@@ -857,9 +857,9 @@ int copy_sds(int32 sd_in,
if (path)
- free(path);
+ HDfree(path);
if (buf)
- free(buf);
+ HDfree(buf);
return SUCCEED;
@@ -873,9 +873,9 @@ out:
printf( "Failed to close SDS <%s>\n", path);
}
if (path)
- free(path);
+ HDfree(path);
if (buf)
- free(buf);
+ HDfree(buf);
return FAIL;
@@ -937,7 +937,7 @@ int copy_sds_attrs(int32 id_in,
}
if (attr_buf)
- free(attr_buf);
+ HDfree(attr_buf);
}
return SUCCEED;
@@ -946,7 +946,7 @@ int copy_sds_attrs(int32 id_in,
out:
if (attr_buf)
- free(attr_buf);
+ HDfree(attr_buf);
return FAIL;
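
The copy_sds() changes above sit inside its stripmine loop: the dataset is copied in fixed-size slabs, with hs_offset zeroed up front and advanced after each slab so arbitrarily large SDSs never need one huge buffer. The sketch below shows the one-dimensional shape of that loop with made-up sizes.

    #include <stdio.h>
    #include <string.h>

    #define TOTAL_ELMTS  1000u
    #define SLAB_ELMTS   256u          /* stripmine buffer size in elements */

    int main(void)
    {
        unsigned long offset[1];
        unsigned long elmtno, hs_nelmts;

        memset(offset, 0, sizeof offset);              /* start at the origin */
        for (elmtno = 0; elmtno < TOTAL_ELMTS; elmtno += hs_nelmts) {
            hs_nelmts = TOTAL_ELMTS - elmtno;
            if (hs_nelmts > SLAB_ELMTS)
                hs_nelmts = SLAB_ELMTS;                /* clamp the last slab */
            /* read slab [offset, offset + hs_nelmts) here, then write it out */
            printf("slab at %lu, %lu elements\n", offset[0], hs_nelmts);
            offset[0] += hs_nelmts;
        }
        return 0;
    }
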
diff --git a/mfhdf/hrepack/hrepack_vg.c b/mfhdf/hrepack/hrepack_vg.c
index df9e9d8..588451e 100644
--- a/mfhdf/hrepack/hrepack_vg.c
+++ b/mfhdf/hrepack/hrepack_vg.c
@@ -56,19 +56,19 @@ int copy_vgroup_attrs(int32 vg_in, int32 vg_out, char *path,options_t *options)
printf( "Failed to get attribute %d of <%s>\n", i, path);
continue;
}
- if ((buf = (char *)malloc( (size_t) (size * n_values) ))==NULL ) {
+ if ((buf = (char *)HDmalloc( (size_t) (size * n_values) ))==NULL ) {
printf( "Failed to get memory for attribute %d of <%s>\n", i, path);
continue;
}
if((Vgetattr2 (vg_in, i, buf))==FAIL){
printf( "Failed to get attribute %d of <%s>\n", i, path);
- if (buf) free(buf);
+ if (buf) HDfree(buf);
continue;
}
if((Vsetattr(vg_out, attr_name, data_type, n_values, buf))==FAIL){
printf( "Failed to set attribute %d of <%s>\n", i, path);
}
- if (buf) free(buf);
+ if (buf) HDfree(buf);
}
return 1;
}
diff --git a/mfhdf/hrepack/hrepack_vs.c b/mfhdf/hrepack/hrepack_vs.c
index ce037ea..6ce3dd3 100644
--- a/mfhdf/hrepack/hrepack_vs.c
+++ b/mfhdf/hrepack/hrepack_vs.c
@@ -103,7 +103,7 @@ int copy_vs( int32 infile_id,
if ( options->trip==0 ) {
if (VSdetach (vdata_id)==FAIL)
printf( "Failed to detach vdata <%s>\n", path_name);
- if (path) free(path);
+ if (path) HDfree(path);
return 0;
}
@@ -116,7 +116,7 @@ int copy_vs( int32 infile_id,
if (VSinquire(vdata_id, &n_records, &interlace_mode, fieldname_list,
&vdata_size, vdata_name) == FAIL) {
printf( "Failed to get info for vdata ref %ld\n", ref);
- if (path) free(path);
+ if (path) HDfree(path);
return-1;
}
@@ -130,7 +130,7 @@ int copy_vs( int32 infile_id,
if ((vdata_out = VSattach (outfile_id, -1, "w")) == FAIL) {
printf( "Failed to create new VS <%s>\n", path);
VSdetach (vdata_id);
- if (path) free(path);
+ if (path) HDfree(path);
return -1;
}
if (VSsetname (vdata_out, vdata_name)== FAIL) {
@@ -193,7 +193,7 @@ int copy_vs( int32 infile_id,
}
if (n_records>0)
{
- if ((buf = (uint8 *)malloc( (size_t)(n_records * vdata_size))) == NULL ){
+ if ((buf = (uint8 *)HDmalloc( (size_t)(n_records * vdata_size))) == NULL ){
printf( "Failed to get memory for new VS <%s>\n", path);
ret=-1;
goto out;
@@ -276,10 +276,10 @@ out:
}
if (path)
- free(path);
+ HDfree(path);
if (buf)
- free(buf);
-
+ HDfree(buf);
+
return ret;
}
@@ -308,7 +308,7 @@ int copy_vdata_attribute(int32 in, int32 out, int32 findex, intn attrindex)
VSattrinfo(in, findex, attrindex, attr_name, &attr_type, &n_values, &attr_size);
/* Allocate space for attribute values */
- if ((values = (VOIDP)malloc((size_t)(attr_size * n_values))) == NULL) {
+ if ((values = (VOIDP)HDmalloc((size_t)(attr_size * n_values))) == NULL) {
printf( "Cannot allocate %ld values of size %ld for attribute %s",
n_values, attr_size, attr_name);
return-1;
@@ -317,19 +317,19 @@ int copy_vdata_attribute(int32 in, int32 out, int32 findex, intn attrindex)
/* Read attribute from input object */
if (VSgetattr(in, findex, attrindex, values) == FAIL) {
printf( "Cannot read attribute %s\n", attr_name);
- if (values) free(values);
+ if (values) HDfree(values);
return-1;
}
/* Write attribute to output object */
if (VSsetattr(out, findex, attr_name, attr_type, n_values, values) == FAIL) {
printf( "Cannot write attribute %s\n", attr_name);
- if (values) free(values);
+ if (values) HDfree(values);
return-1;
}
-
- if (values) free(values);
-
+
+ if (values) HDfree(values);
+
return 1;
}
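
copy_vdata_attribute() above follows an allocate / read / write / free shape, and the HDfree() edits make sure the buffer is released on every exit path. A sketch of that shape with stub read/write helpers standing in for VSgetattr()/VSsetattr():

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    static int read_attr(void *buf, size_t n)        { memset(buf, 7, n); return 0; }
    static int write_attr(const void *buf, size_t n) { (void)buf; (void)n; return 0; }

    static int copy_attr(size_t value_size, size_t n_values)
    {
        void *values = malloc(value_size * n_values);
        if (values == NULL) {
            printf("Cannot allocate attribute buffer\n");
            return -1;
        }
        if (read_attr(values, value_size * n_values) != 0) {
            free(values);                      /* release before bailing out */
            return -1;
        }
        if (write_attr(values, value_size * n_values) != 0) {
            free(values);
            return -1;
        }
        free(values);
        return 1;
    }

    int main(void)
    {
        printf("copy_attr -> %d\n", copy_attr(4, 8));
        return 0;
    }
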
diff --git a/mfhdf/hrepack/hrepacktst.c b/mfhdf/hrepack/hrepacktst.c
index 82dc5ac..33db075 100644
--- a/mfhdf/hrepack/hrepacktst.c
+++ b/mfhdf/hrepack/hrepacktst.c
@@ -127,11 +127,11 @@ int read_data(const char* fname)
if ( g_image_data != NULL )
{
- free( g_image_data );
+ HDfree( g_image_data );
g_image_data=NULL;
}
- g_image_data = (unsigned char*)malloc(w*h*color_planes*sizeof(unsigned char));
+ g_image_data = (unsigned char*)HDmalloc(w*h*color_planes*sizeof(unsigned char));
for (i = 0; i < h*w*color_planes ; i++)
{
@@ -476,9 +476,9 @@ out:
GRendaccess(ri1_id);
GRendaccess(ri2_id);
if (buf1)
- free(buf1);
+ HDfree(buf1);
if (buf2)
- free(buf2);
+ HDfree(buf2);
return cmp;
}
@@ -982,10 +982,10 @@ int add_gr_ffile(const char* name_file,
/* compose the name of the file to open, using the srcdir, if appropriate */
if ( srcdir )
{
- strcpy(data_file, srcdir);
- strcat(data_file, "/");
+ HDstrcpy(data_file, srcdir);
+ HDstrcat(data_file, "/");
}
- strcat( data_file, name_file);
+ HDstrcat( data_file, name_file);
if ( read_data(data_file) > 0 )
{
@@ -1049,7 +1049,7 @@ int add_gr_ffile(const char* name_file,
if ( g_image_data != NULL )
{
- free( g_image_data );
+ HDfree( g_image_data );
g_image_data=NULL;
}
@@ -1336,10 +1336,10 @@ int add_r8(const char* image_file,
/* compose the name of the file to open, using the srcdir, if appropriate */
if ( srcdir )
{
- strcpy(data_file, srcdir);
- strcat(data_file, "/");
+ HDstrcpy(data_file, srcdir);
+ HDstrcat(data_file, "/");
}
- strcat( data_file, image_file);
+ HDstrcat( data_file, image_file);
if ( read_data(data_file) > 0 )
{
@@ -1374,7 +1374,7 @@ int add_r8(const char* image_file,
if ( g_image_data != NULL )
{
- free( g_image_data );
+ HDfree( g_image_data );
g_image_data=NULL;
}
@@ -1411,10 +1411,10 @@ int add_r24(const char* image_file,
/* compose the name of the file to open, using the srcdir, if appropriate */
if ( srcdir )
{
- strcpy(data_file, srcdir);
- strcat(data_file, "/");
+ HDstrcpy(data_file, srcdir);
+ HDstrcat(data_file, "/");
}
- strcat( data_file, image_file);
+ HDstrcat( data_file, image_file);
if ( read_data(data_file) > 0 )
{
@@ -1451,7 +1451,7 @@ int add_r24(const char* image_file,
if ( g_image_data != NULL )
{
- free( g_image_data );
+ HDfree( g_image_data );
g_image_data=NULL;
}
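
The test helpers above build the data-file path from an optional srcdir prefix before reading the image data. The same composition, stripped down; the srcdir value is a placeholder, and image8.txt is taken from the test-file list copied by CMakeTests.cmake.

    #include <stdio.h>
    #include <string.h>

    int main(void)
    {
        const char *srcdir = "/path/to/source";   /* e.g. from getenv("srcdir") */
        const char *name_file = "image8.txt";
        char data_file[512] = "";

        if (srcdir) {
            strcpy(data_file, srcdir);
            strcat(data_file, "/");
        }
        strcat(data_file, name_file);
        printf("%s\n", data_file);
        return 0;
    }
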
diff --git a/mfhdf/libsrc/CMakeLists.txt b/mfhdf/libsrc/CMakeLists.txt
index 6dd2c6f..3b9128a 100644
--- a/mfhdf/libsrc/CMakeLists.txt
+++ b/mfhdf/libsrc/CMakeLists.txt
@@ -1,4 +1,4 @@
-cmake_minimum_required (VERSION 2.8.10)
+cmake_minimum_required (VERSION 3.1)
PROJECT (HDF4_MFHDF_LIBSRC C CXX)
#-----------------------------------------------------------------------------
@@ -61,24 +61,52 @@ endif (CYGWIN)
INCLUDE_DIRECTORIES (${HDF4_HDFSOURCE_DIR})
INCLUDE_DIRECTORIES (${HDF4_MFHDFSOURCE_DIR})
-add_definitions (-DHDF)
-
-if (WIN32)
- add_definitions (-DDOS_FS -DSWAP)
-endif (WIN32)
-
-add_library (${HDF4_MF_LIB_TARGET} ${LIB_TYPE} ${HDF4_MFHDF_LIBSRC_CSRCS} ${HDF4_MFHDF_LIBSRC_CHDRS})
+add_library (${HDF4_MF_LIB_TARGET} STATIC ${HDF4_MFHDF_LIBSRC_CSRCS} ${HDF4_MFHDF_LIBSRC_CHDRS})
if (HDF4_BUILD_XDR_LIB)
- if (WIN32)
- add_definitions (-DNO_SYS_XDR_INC)
- endif (WIN32)
INCLUDE_DIRECTORIES (${HDF4_MFHDF_XDR_DIR})
target_link_libraries (${HDF4_MF_LIB_TARGET} ${HDF4_MF_XDR_LIB_TARGET})
endif (HDF4_BUILD_XDR_LIB)
-TARGET_C_PROPERTIES (${HDF4_MF_LIB_TARGET} " " " ")
+TARGET_C_PROPERTIES (${HDF4_MF_LIB_TARGET} STATIC " " " ")
target_link_libraries (${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET})
set_global_variable (HDF4_LIBRARIES_TO_EXPORT "${HDF4_LIBRARIES_TO_EXPORT};${HDF4_MF_LIB_TARGET}")
-H4_SET_LIB_OPTIONS (${HDF4_MF_LIB_TARGET} ${HDF4_MF_LIB_NAME} ${LIB_TYPE})
+H4_SET_LIB_OPTIONS (${HDF4_MF_LIB_TARGET} ${HDF4_MF_LIB_NAME} STATIC)
+set_target_properties (${HDF4_MF_LIB_TARGET} PROPERTIES
+ FOLDER libraries
+ COMPILE_DEFINITIONS "HDF"
+ INTERFACE_INCLUDE_DIRECTORIES "$<INSTALL_INTERFACE:$<INSTALL_PREFIX>/include>"
+)
+if (WIN32)
+ set_property (TARGET ${HDF4_MF_LIB_TARGET} APPEND PROPERTY COMPILE_DEFINITIONS "DOS_FS;SWAP")
+ if (HDF4_BUILD_XDR_LIB)
+ set_property (TARGET ${HDF4_MF_LIB_TARGET} APPEND PROPERTY COMPILE_DEFINITIONS "NO_SYS_XDR_INC")
+ endif (HDF4_BUILD_XDR_LIB)
+endif (WIN32)
+set (install_targets ${HDF4_MF_LIB_TARGET})
+
+if (BUILD_SHARED_LIBS)
+ add_library (${HDF4_MF_LIBSH_TARGET} SHARED ${HDF4_MFHDF_LIBSRC_CSRCS} ${HDF4_MFHDF_LIBSRC_CHDRS})
+ if (HDF4_BUILD_XDR_LIB)
+ INCLUDE_DIRECTORIES (${HDF4_MFHDF_XDR_DIR})
+ target_link_libraries (${HDF4_MF_LIBSH_TARGET} ${HDF4_MF_XDR_LIBSH_TARGET})
+ endif (HDF4_BUILD_XDR_LIB)
+ TARGET_C_PROPERTIES (${HDF4_MF_LIBSH_TARGET} SHARED " " " ")
+ target_link_libraries (${HDF4_MF_LIBSH_TARGET} ${HDF4_SRC_LIBSH_TARGET})
+ set_global_variable (HDF4_LIBRARIES_TO_EXPORT "${HDF4_LIBRARIES_TO_EXPORT};${HDF4_MF_LIBSH_TARGET}")
+ H4_SET_LIB_OPTIONS (${HDF4_MF_LIBSH_TARGET} ${HDF4_MF_LIB_NAME} SHARED)
+ set_target_properties (${HDF4_MF_LIBSH_TARGET} PROPERTIES
+ FOLDER libraries
+ COMPILE_DEFINITIONS "HDF;H4_BUILT_AS_DYNAMIC_LIB"
+ INTERFACE_INCLUDE_DIRECTORIES "$<INSTALL_INTERFACE:$<INSTALL_PREFIX>/include>"
+ INTERFACE_COMPILE_DEFINITIONS H4_BUILT_AS_DYNAMIC_LIB=1
+ )
+ if (WIN32)
+ set_property (TARGET ${HDF4_MF_LIBSH_TARGET} APPEND PROPERTY COMPILE_DEFINITIONS "DOS_FS;SWAP")
+ if (HDF4_BUILD_XDR_LIB)
+ set_property (TARGET ${HDF4_MF_LIBSH_TARGET} APPEND PROPERTY COMPILE_DEFINITIONS "NO_SYS_XDR_INC")
+ endif (HDF4_BUILD_XDR_LIB)
+ endif (WIN32)
+ set (install_targets ${install_targets} ${HDF4_MF_LIBSH_TARGET})
+endif (BUILD_SHARED_LIBS)
#-----------------------------------------------------------------------------
# Add file(s) to CMake Install
@@ -96,15 +124,16 @@ install (
# Add library to CMake Install : Installs lib and cmake config info
#-----------------------------------------------------------------------------
if (BUILD_SHARED_LIBS)
- INSTALL_TARGET_PDB (${HDF4_MF_LIB_TARGET} ${HDF4_INSTALL_LIB_DIR} libraries)
+ INSTALL_TARGET_PDB (${HDF4_MF_LIBSH_TARGET} ${HDF4_INSTALL_BIN_DIR} libraries)
endif (BUILD_SHARED_LIBS)
-
+
install (
- TARGETS
- ${HDF4_MF_LIB_TARGET}
- EXPORT
+ TARGETS
+ ${install_targets}
+ EXPORT
${HDF4_EXPORTED_TARGETS}
- LIBRARY DESTINATION ${HDF4_INSTALL_LIB_DIR} COMPONENT libraries
+ LIBRARY DESTINATION ${HDF4_INSTALL_LIB_DIR} COMPONENT libraries
ARCHIVE DESTINATION ${HDF4_INSTALL_LIB_DIR} COMPONENT libraries
RUNTIME DESTINATION ${HDF4_INSTALL_BIN_DIR} COMPONENT libraries
+ FRAMEWORK DESTINATION ${HDF4_INSTALL_FWRK_DIR} COMPONENT libraries
)
diff --git a/mfhdf/libsrc/Makefile.in b/mfhdf/libsrc/Makefile.in
index 846780e..799c817 100644
--- a/mfhdf/libsrc/Makefile.in
+++ b/mfhdf/libsrc/Makefile.in
@@ -91,7 +91,20 @@ F77LINK = $(LIBTOOL) $(AM_V_lt) --tag=F77 $(AM_LIBTOOLFLAGS) \
$(AM_LDFLAGS) $(LDFLAGS) -o $@
subdir = mfhdf/libsrc
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
-am__aclocal_m4_deps = $(top_srcdir)/configure.ac
+am__aclocal_m4_deps = $(top_srcdir)/m4/ax_check_class.m4 \
+ $(top_srcdir)/m4/ax_check_classpath.m4 \
+ $(top_srcdir)/m4/ax_check_java_home.m4 \
+ $(top_srcdir)/m4/ax_check_junit.m4 \
+ $(top_srcdir)/m4/ax_java_options.m4 \
+ $(top_srcdir)/m4/ax_jni_include_dir.m4 \
+ $(top_srcdir)/m4/ax_prog_jar.m4 \
+ $(top_srcdir)/m4/ax_prog_java.m4 \
+ $(top_srcdir)/m4/ax_prog_java_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javac.m4 \
+ $(top_srcdir)/m4/ax_prog_javac_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javadoc.m4 \
+ $(top_srcdir)/m4/ax_try_compile_java.m4 \
+ $(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
mkinstalldirs = $(SHELL) $(top_srcdir)/bin/mkinstalldirs
@@ -265,12 +278,27 @@ FFLAGS = @FFLAGS@
FGREP = @FGREP@
FLIBS = @FLIBS@
GREP = @GREP@
+H4_CLASSPATH = @H4_CLASSPATH@
+H4_JAVACFLAGS = @H4_JAVACFLAGS@
+H4_JAVAFLAGS = @H4_JAVAFLAGS@
H4_VERSION = @H4_VERSION@
+HDF_JAVA = @HDF_JAVA@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
INSTALL_PROGRAM = @INSTALL_PROGRAM@
INSTALL_SCRIPT = @INSTALL_SCRIPT@
INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+JAR = @JAR@
+JAVA = @JAVA@
+JAVAC = @JAVAC@
+JAVACFLAGS = @JAVACFLAGS@
+JAVADOC = @JAVADOC@
+JAVAFLAGS = @JAVAFLAGS@
+JAVAPREFIX = @JAVAPREFIX@
+JAVA_JUNIT = @JAVA_JUNIT@
+JAVA_PATH_NAME = @JAVA_PATH_NAME@
+JNIFLAGS = @JNIFLAGS@
+JUNIT = @JUNIT@
LD = @LD@
LDFLAGS = @LDFLAGS@
LEX = @LEX@
@@ -313,11 +341,14 @@ STRIP = @STRIP@
SZIP_HAS_ENCODER = @SZIP_HAS_ENCODER@
SZIP_INFO = @SZIP_INFO@
TBL = @TBL@
+TESTS_JUNIT = @TESTS_JUNIT@
TEST_FORTRAN_NETCDF = @TEST_FORTRAN_NETCDF@
UNAME_INFO = @UNAME_INFO@
USE_COMP_SZIP = @USE_COMP_SZIP@
+UUDECODE = @UUDECODE@
VERSION = @VERSION@
YACC = @YACC@
+_ACJNI_JAVAC = @_ACJNI_JAVAC@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/mfhdf/libsrc/array.c b/mfhdf/libsrc/array.c
index 807cc16..3416497 100644
--- a/mfhdf/libsrc/array.c
+++ b/mfhdf/libsrc/array.c
@@ -14,7 +14,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-/* $Id: array.c 6032 2014-01-17 18:13:52Z acheng $ */
+/* $Id: array.c 6431 2016-06-14 13:48:03Z bmribler $ */
#include <string.h>
#include "local_nc.h"
@@ -249,19 +249,19 @@ const void *values ;
ret->type = type ;
ret->szof = NC_typelen(type) ;
-#ifdef DEBUG
+#ifdef SDDEBUG
fprintf(stderr, "NC_new_array(): type=%u, NC_typelen(type)=%u\n",(unsigned)type,(unsigned)ret->szof);
#endif
ret->count = count ;
memlen = count * ret->szof ;
ret->len = count * NC_xtypelen(type) ;
-#ifdef DEBUG
+#ifdef SDDEBUG
fprintf(stderr, "NC_new_array(): count=%u, memlen=%u\n",count,memlen);
#endif
if( count != 0 )
{
ret->values = (Void*)HDmalloc(memlen) ;
-#ifdef DEBUG
+#ifdef SDDEBUG
fprintf(stderr, "NC_new_array(): ret->values=%p, values=%p\n",ret->values,values);
#endif
if(ret->values == NULL)
@@ -277,7 +277,7 @@ const void *values ;
ret->values = NULL ;
}
-#ifdef DEBUG
+#ifdef SDDEBUG
fprintf(stderr, "NC_new_array(): ret=%p\n",ret);
#endif
return(ret) ;
@@ -549,10 +549,10 @@ xdr_NC_array(xdrs, app)
NC_array **app;
{
bool_t (*xdr_NC_fnct)() ;
- u_long count , *countp=NULL ;
- nc_type type , *typep=NULL ;
+ u_long count = 0, *countp=NULL ;
+ nc_type type = NC_UNSPECIFIED, *typep=NULL ;
bool_t stat ;
- Void *vp ;
+ Void *vp = NULL;
switch (xdrs->x_op) {
case XDR_FREE:
@@ -577,13 +577,18 @@ xdr_NC_array(xdrs, app)
typep = &type ;
break ;
}
+
+ /* This USE_ENUM may not be necessary after xdr and code cleanup.
+ See HDFFR-1318, HDFFR-1327, and other Mac/XDR issues for details.
+ I had tried and xdr_enum worked consistently even though there were
+ failures in other places. -BMR, 6/14/2016 */
#ifdef USE_ENUM
- if (! xdr_enum(xdrs, (enum_t *)typep)) {
+ if (! xdr_enum(xdrs, (enum_t *)typep)) {
NCadvise(NC_EXDR, "xdr_NC_array:xdr_enum") ;
return (FALSE);
}
#else
- if (! xdr_int(xdrs, typep)) {
+ if (! xdr_int(xdrs, typep)) {
NCadvise(NC_EXDR, "xdr_NC_array:xdr_int (enum)") ;
return (FALSE);
}
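
The array.c hunk renames the diagnostic guard from DEBUG to SDDEBUG, so a project-wide -DDEBUG no longer switches these traces on, and it initializes count/type/vp before the switch. A sketch of a library-specific trace guard in that spirit; SD_TRACE is an invented macro, not part of the library.

    #include <stdio.h>

    #ifdef SDDEBUG
    #  define SD_TRACE(msg) fprintf(stderr, "trace: %s\n", (msg))
    #else
    #  define SD_TRACE(msg) ((void)0)
    #endif

    int main(void)
    {
        SD_TRACE("entering main");     /* no-op unless built with -DSDDEBUG */
        puts("done");
        return 0;
    }
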
diff --git a/mfhdf/libsrc/cdf.c b/mfhdf/libsrc/cdf.c
index 97e69e0..579fada 100644
--- a/mfhdf/libsrc/cdf.c
+++ b/mfhdf/libsrc/cdf.c
@@ -14,7 +14,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-/* $Id: cdf.c 6032 2014-01-17 18:13:52Z acheng $ */
+/* $Id: cdf.c 6450 2016-06-16 06:38:24Z bmribler $ */
#include "local_nc.h"
#include "alloc.h"
@@ -147,131 +147,155 @@ done:
#ifdef HDF
-/******************************************************************************/
+/* --------------------------- hdf_get_magicnum ---------------------------- */
/*
- From NASA CDF Source
-*/
-#define V2_MAGIC_NUMBER 0x0000FFFF /* Written twice at the beginning of file */
-#define V2_MAGIC_OFFSET 0
+ Return the file's format version number, i.e., magic number. This number
+ can be used to determine the format type of a file, such as HDF, CDF, or
+ netCDF/64-bit.
-/* -------------------------------- HDiscdf -------------------------------- */
-/*
- Return TRUE/FALSE depending on if the given file is a NASA CDF file
+ Refactored out from existing functions. -BMR, Jun 7, 2016
*/
-intn
-HDiscdf(filename)
-const char * filename;
+int32 hdf_get_magicnum(filename)
+const char *filename;
{
-
- static const char *FUNC = "HDiscdf";
+ CONSTR(FUNC, "hdf_get_magicnum"); /* for HERROR */
hdf_file_t fp;
- uint8 b[4];
- uint8 * bb = NULL;
+ uint8 buf[4];
+ uint8 *pbuf = NULL;
int32 magic_num;
- intn ret_value = TRUE;
+ int32 ret_value = 0;
fp = (hdf_file_t)HI_OPEN(filename, DFACC_READ);
if (OPENERR(fp))
- {
- ret_value = FALSE;
- goto done;
- }
- else
- {
- if(HI_SEEK(fp, V2_MAGIC_OFFSET) == FAIL)
- {
- HERROR(DFE_SEEKERROR);
- ret_value = FALSE;
- goto done;
- }
+ {
+ HGOTO_ERROR(DFE_BADNAME, FAIL);
+ }
+
+ /* Make sure it is at the beginning of the file */
+ if(HI_SEEK(fp, MAGICOFFSET) == FAIL)
+ {
+ HGOTO_ERROR(DFE_SEEKERROR, FAIL);
+ }
- if(HI_READ(fp, b, 4) == FAIL)
- {
- HERROR(DFE_READERROR);
- ret_value = FALSE;
- goto done;
- }
+ /* Read the first 4 bytes in the file, where the format version number
+ is stored. */
+ if(HI_READ(fp, buf, MAGICLEN) == FAIL)
+ {
+ HI_CLOSE(fp);
+ HGOTO_ERROR(DFE_READERROR, FAIL);
+ }
- bb = &b[0];
+ /* Obtain the file format version number then close the file*/
+ pbuf = &buf[0];
+ INT32DECODE(pbuf, magic_num);
+ HI_CLOSE(fp);
- INT32DECODE(bb, magic_num);
+ /* If magic_num is a valid file format version number, then return it */
+ if (magic_num == HDFXMAGIC || magic_num == CDFMAGIC ||
+ magic_num == NCMAGIC || magic_num == NCMAGIC64)
+ ret_value = magic_num;
+ else
+ HGOTO_ERROR(DFE_INVFILE, FAIL);
+done:
+ if (ret_value == FALSE)
+ { /* Failure cleanup */
+ }
+ /* Normal cleanup */
- if(magic_num == V2_MAGIC_NUMBER)
- ret_value = TRUE;
- else
- ret_value = FALSE;
+ return ret_value;
+} /* hdf_get_magicnum */
- HI_CLOSE(fp);
- }
+/* -------------------------------- HDiscdf -------------------------------- */
+/*
+ Return TRUE/FALSE depending on if the given file is a NASA CDF file
+*/
+intn HDiscdf(filename)
+const char *filename;
+{
+ CONSTR(FUNC, "HDiscdf"); /* for HGOTO_ERROR */
+ int32 magic_num = 0;
+ intn ret_value = FALSE;
+
+ /* Use internal function to open the file and get a magic number if the
+ file has one */
+ magic_num = hdf_get_magicnum(filename);
+
+ /* A CDF file would have CDFMAGIC at the beginning */
+ if (magic_num == CDFMAGIC)
+ ret_value = TRUE;
+ else
+ ret_value = FALSE;
done:
if (ret_value == FALSE)
- { /* FALSE cleanup ?*/
-
+ { /* Failure cleanup */
}
/* Normal cleanup */
return ret_value;
}
+/* -------------------------------- HDisnetcdf --------------------------------
-/*
- Model after HDiscdf
-*/
-/* -------------------------------- HDisnetcdf -------------------------------- */
-/*
Return TRUE if the given file is a netCDF file, FALSE otherwise.
*/
-intn
-HDisnetcdf(filename)
-const char * filename;
+intn HDisnetcdf(filename)
+const char *filename;
{
-
- static const char *FUNC = "HDisnetcdf";
- hdf_file_t fp;
- uint8 b[4];
- uint8 * bb = NULL;
- int32 magic_num;
- intn ret_value = TRUE;
+ CONSTR(FUNC, "HDisnetcdf"); /* for HGOTO_ERROR */
+ int32 magic_num = 0;
+ intn ret_value = FALSE;
- fp = (hdf_file_t)HI_OPEN(filename, DFACC_READ);
- if (OPENERR(fp))
- {
- ret_value = FALSE;
- goto done;
- }
+ /* Use internal function to open the file and get a magic number if the
+ file has one */
+ magic_num = hdf_get_magicnum(filename);
+
+ /* A classic netCDF file would have NCMAGIC at the beginning */
+ if (magic_num == NCMAGIC)
+ ret_value = TRUE;
else
- {
- if(HI_READ(fp, b, 4) == FAIL)
- {
- HERROR(DFE_READERROR);
- HI_CLOSE(fp);
- ret_value = FALSE;
- goto done;
- }
+ ret_value = FALSE;
- bb = &b[0];
+done:
+ if (ret_value == FALSE)
+ { /* FALSE cleanup */
+ }
+ /* Normal cleanup */
- INT32DECODE(bb, magic_num);
+ return ret_value;
+} /* HDisnetcdf */
- if(magic_num == NCMAGIC)
- ret_value = TRUE;
- else
- ret_value = FALSE;
+/* ------------------------------ HDisnetcdf64 --------------------------------
- HI_CLOSE(fp);
- }
+ Return TRUE if the given file is a netCDF 64-bit file, FALSE otherwise.
+*/
+intn HDisnetcdf64(filename)
+const char *filename;
+{
+ CONSTR(FUNC, "HDisnetcdf64"); /* for HGOTO_ERROR */
+ int32 magic_num = 0;
+ intn ret_value = FALSE;
+
+ /* Use internal function to open the file and get a magic number if the
+ file has one */
+ magic_num = hdf_get_magicnum(filename);
+
+ /* A 64-bit netCDF file would have NCMAGIC64 at the beginning */
+ if (magic_num == NCMAGIC64)
+ ret_value = TRUE;
+ else
+ ret_value = FALSE;
done:
if (ret_value == FALSE)
- { /* FALSE cleanup? */
-
+ { /* FALSE cleanup */
}
/* Normal cleanup */
return ret_value;
-}
+} /* HDisnetcdf64 */
+
/******************************************************************************/
#endif /* HDF */
@@ -754,7 +778,7 @@ NC_xdr_cdf(xdrs, handlep)
*
******************************************************************************
*
-* Please report all bugs / comments to hdfhelp at ncsa.uiuc.edu
+* Please report all bugs / comments to help at hdfgroup.org
*
*****************************************************************************/
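
The cdf.c refactoring centralizes format detection in hdf_get_magicnum(): open the file, read the first four bytes, decode them big-endian, and compare against the HDF/CDF/netCDF magic numbers, so HDiscdf(), HDisnetcdf() and the new HDisnetcdf64() each reduce to a single comparison. A standalone sketch of that detection follows, using plain stdio in place of HI_OPEN/HI_READ and only the two netCDF signatures ("CDF\001" classic, "CDF\002" 64-bit offsets); the SKETCH_* names are not the library's constants.

    #include <stdio.h>
    #include <stdint.h>

    #define SKETCH_NCMAGIC   0x43444601u   /* "CDF\001" -- classic netCDF  */
    #define SKETCH_NCMAGIC64 0x43444602u   /* "CDF\002" -- 64-bit offsets  */

    static uint32_t get_magicnum(const char *filename)
    {
        unsigned char buf[4];
        uint32_t magic;
        FILE *fp = fopen(filename, "rb");

        if (fp == NULL)
            return 0;                          /* sketch: 0 means "no magic" */
        if (fread(buf, 1, sizeof buf, fp) != sizeof buf) {
            fclose(fp);
            return 0;
        }
        fclose(fp);
        /* big-endian decode, the moral equivalent of INT32DECODE() above */
        magic = ((uint32_t)buf[0] << 24) | ((uint32_t)buf[1] << 16) |
                ((uint32_t)buf[2] << 8)  |  (uint32_t)buf[3];
        return magic;
    }

    int main(int argc, char *argv[])
    {
        if (argc > 1) {
            uint32_t magic = get_magicnum(argv[1]);
            printf("is netCDF:        %d\n", magic == SKETCH_NCMAGIC);
            printf("is 64-bit netCDF: %d\n", magic == SKETCH_NCMAGIC64);
        }
        return 0;
    }
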
diff --git a/mfhdf/libsrc/hdfnctest.c b/mfhdf/libsrc/hdfnctest.c
index d3538e3..02af1f5 100644
--- a/mfhdf/libsrc/hdfnctest.c
+++ b/mfhdf/libsrc/hdfnctest.c
@@ -10,10 +10,6 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 5109 $";
-#endif
-
#include "mfhdf.h"
#include "hdftest.h"
diff --git a/mfhdf/libsrc/hdfsds.c b/mfhdf/libsrc/hdfsds.c
index c96f333..3e8d172 100644
--- a/mfhdf/libsrc/hdfsds.c
+++ b/mfhdf/libsrc/hdfsds.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6043 $";
-#endif
-
-/* $Id: hdfsds.c 6043 2014-01-21 21:09:03Z acheng $ */
+/* $Id: hdfsds.c 6441 2016-06-14 21:31:36Z bmribler $ */
/***************************************************************************
*
@@ -68,6 +64,12 @@ static char RcsId[] = "@(#)$Revision: 6043 $";
#define SDG_MAX_INITIAL 100
+/* local macros */
+/* A variation of HGOTO_ERROR macro, but instead of label "done:",
+ it is for label "done_adesc:", which is only in this file. */
+#define HGOTO_ADESC_ERROR(err, ret_val) {HERROR(err); ret_value = ret_val; \
+ goto done_adesc;}
+
/* local variables */
PRIVATE intn sdgCurrent;
PRIVATE intn sdgMax;
@@ -132,6 +134,7 @@ hdf_query_seen_sdg(uint16 ndgRef)
PRIVATE intn
hdf_register_seen_sdg(uint16 sdgRef)
{
+ CONSTR(FUNC, "hdf_register_seen_sdg"); /* for HERROR */
intn ret_value = SUCCEED;
/* check if table is allocated */
@@ -141,8 +144,7 @@ hdf_register_seen_sdg(uint16 sdgRef)
sdgTable = (uint16 *) HDmalloc(sdgMax * sizeof(uint16));
if (sdgTable == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_NOSPACE, FAIL);
}
sdgCurrent = 0;
}
@@ -157,8 +159,7 @@ hdf_register_seen_sdg(uint16 sdgRef)
sdgTable = (uint16 *) HDrealloc((VOIDP) sdgTable, sdgMax * sizeof(uint16));
if (sdgTable == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_NOSPACE, FAIL);
}
}
@@ -172,6 +173,40 @@ done:
return ret_value;
} /* hdf_register_seen_sdg */
+/******************************************************************************
+ NAME
+ hdf_check_nt - internal utility function
+
+ DESCRIPTION
+ Test if data was stored in native format of different machine or in the
+ LITEND format, and make sure the numbertype version numbers are the same
+ so we don't read it twice
+ (Refactored from hdf_read_ndgs)
+
+ RETURNS
+ SUCCEED / FAIL
+
+******************************************************************************/
+intn hdf_check_nt(uint8 *ntstring, int32 *type)
+{
+ intn ret_value = SUCCEED;
+ if ((ntstring[0] != DFNT_VERSION)
+ || ((ntstring[3] != DFNTF_NONE) && (ntstring[3] != DFNTF_IEEE)))
+ {
+ if (ntstring[3] == DFNTF_PC) /* Little Endian */
+ *type |= DFNT_LITEND;
+ else
+ { /* same machine type? */
+ if (ntstring[3] == DFKgetPNSC(*type, DF_MT))
+ *type |= DFNT_NATIVE;
+ else /* different machine */
+ {
+ ret_value = FAIL;
+ }
+ } /* machine type */
+ } /* Little Endian */
+ return(ret_value);
+} /* hdf_check_nt */
/******************************************************************************
NAME
@@ -188,7 +223,7 @@ done:
PRIVATE intn
hdf_read_ndgs(NC *handle)
{
- static const char *FUNC = "hdf_read_ndg_dims";
+ CONSTR(FUNC, "hdf_read_ndgs"); /* for HERROR */
char tmpname[80] = "";
uint8 ntstring[4] = "";
intn dimcount;
@@ -212,6 +247,7 @@ hdf_read_ndgs(NC *handle)
int32 *dimsizes = NULL;
int32 *scaletypes = NULL;
int32 HDFtype;
+int32 temptype;
intn dim;
intn max_thangs;
intn current_dim;
@@ -247,37 +283,19 @@ hdf_read_ndgs(NC *handle)
dims = (NC_dim **) HDmalloc(sizeof(NC_dim *) * max_thangs);
if(NULL == dims)
{
- HERROR(DFE_NOSPACE);
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_NOSPACE, FAIL);
}
vars = (NC_var **) HDmalloc(sizeof(NC_var *) * max_thangs);
if(NULL == vars)
{
- HERROR(DFE_NOSPACE);
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_NOSPACE, FAIL);
}
attrs = (NC_attr **) HDmalloc(sizeof(NC_attr *) * max_thangs);
if(NULL == attrs)
{
- HERROR(DFE_NOSPACE);
- ret_value = FAIL;
- goto done;
- }
-
- /* Check if temproray buffer has been allocated */
- if (ptbuf == NULL)
- {
- ptbuf = (uint8 *)HDmalloc(TBUF_SZ * sizeof(uint8));
- if (ptbuf == NULL)
- {
- HERROR(DFE_NOSPACE);
- ret_value = FAIL;
- goto done;
- }
+ HGOTO_ERROR(DFE_NOSPACE, FAIL);
}
/* no dimensions or variables yet */
@@ -311,11 +329,7 @@ hdf_read_ndgs(NC *handle)
if(HQuerytagref(aid, &ndgTag, &ndgRef) == FAIL)
{
-#ifdef DEBUG
- fprintf(stderr, "Call to Hinquire failed\n");
-#endif
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
/* Test if its an SDG-NDG which we've processed already */
@@ -333,8 +347,7 @@ hdf_read_ndgs(NC *handle)
/* read the group into memory */
if ((GroupID = DFdiread(handle->hdf_file, ndgTag, ndgRef)) < 0)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
sddRef = lRef = uRef = fRef = sRef = sdRef = 0;
@@ -360,6 +373,18 @@ hdf_read_ndgs(NC *handle)
* is finished.
*/
+ /* Check if temporary buffer has been allocated */
+ if (ptbuf == NULL)
+ {
+ ptbuf = (uint8 *)HDmalloc(TBUF_SZ * sizeof(uint8));
+ if (ptbuf == NULL)
+ {
+ HERROR(DFE_NOSPACE);
+ ret_value = FAIL;
+ goto done;
+ }
+ }
+
while (!DFdiget(GroupID, &tmpTag, &tmpRef))
{
switch(tmpTag)
@@ -368,15 +393,13 @@ hdf_read_ndgs(NC *handle)
aid1 = Hstartread(handle->hdf_file, tmpTag, tmpRef);
if (aid1 == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_CANTACCESS, FAIL);
}
/* read rank */
if (Hread(aid1, (int32) 2, ptbuf) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_READERROR, FAIL);
}
p = ptbuf;
@@ -386,29 +409,25 @@ hdf_read_ndgs(NC *handle)
dimsizes = (int32 *) HDmalloc((uint32) rank * sizeof(int32));
if (dimsizes == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_NOSPACE, FAIL);
}
vardims = (intn *) HDmalloc((uint32) rank * sizeof(intn));
if (vardims == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_NOSPACE, FAIL);
}
scaletypes = (int32 *) HDmalloc((uint32) rank * sizeof(int32));
if (scaletypes == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_NOSPACE, FAIL);
}
/* read dimension record */
if (Hread(aid1, (int32) 4 * rank, ptbuf) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_READERROR, FAIL);
}
p = ptbuf;
@@ -418,8 +437,7 @@ hdf_read_ndgs(NC *handle)
/* read tag/ref of NT */
if (Hread(aid1,(int32) 4, ptbuf) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_READERROR, FAIL);
}
p = ptbuf;
UINT16DECODE(p, ntTag);
@@ -428,8 +446,7 @@ hdf_read_ndgs(NC *handle)
/* read actual NT */
if (Hgetelement(handle->hdf_file, ntTag, ntRef, ntstring) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_GETELEM, FAIL);
}
HDFtype = ntstring[1];
@@ -437,40 +454,21 @@ hdf_read_ndgs(NC *handle)
{
#ifdef DEBUG
/* replace it with NCAdvice or HERROR? */
- fprintf(stderr "hdf_read_ndgs: hdf_unmap_type failed for %d\n", HDFtype);
+ fprintf(stderr, "hdf_read_ndgs: hdf_unmap_type failed for %d\n", HDFtype);
#endif
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
- /* test if data was stored in native format of different
- machine or in the LITEND format, and make sure the
- numbertype version numbers are the same */
- if ((ntstring[0] != DFNT_VERSION)
- || ((ntstring[3] != DFNTF_NONE)
- && (ntstring[3] != DFNTF_IEEE)))
- {
- if (ntstring[3] == DFNTF_PC) /* Little Endian */
- HDFtype |= DFNT_LITEND;
- else
- { /* same machine type? */
- if (ntstring[3] == DFKgetPNSC(HDFtype, DF_MT))
- HDFtype |= DFNT_NATIVE;
- else /* different machine */
- {
- ret_value = FAIL;
- goto done;
- }
- } /* machine type */
- } /* Little Endian */
+ /* Validate number type regarding platform/format */
+ if (hdf_check_nt(ntstring, &HDFtype) == FAIL)
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
/* read in scale NTs */
for(i = 0; i < rank; i++)
{
if (Hread(aid1,(int32) 4, ptbuf) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_READERROR, FAIL);
}
p = ptbuf;
@@ -480,36 +478,26 @@ hdf_read_ndgs(NC *handle)
/* read NT of this scale (dimension) */
if (Hgetelement(handle->hdf_file, ntTag, ntRef, ntstring) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_GETELEM, FAIL);
}
-
+
scaletypes[i] = ntstring[1];
+
+ /* temp preserve scaletype in case of error */
+ temptype = scaletypes[i];
+
/* check native format and LITEND */
- if ((ntstring[0] != DFNT_VERSION)
- || ((ntstring[3] != DFNTF_NONE)
- && (ntstring[3] != DFNTF_IEEE)))
- {
- if (ntstring[3] == DFNTF_PC) /* Little Endian */
- scaletypes[i] |= DFNT_LITEND;
- else
- { /* same machine type? */
- if (ntstring[3] == DFKgetPNSC(HDFtype, DF_MT))
- scaletypes[i] |= DFNT_NATIVE;
- else /* different machine */
- {
- ret_value = FAIL;
- goto done;
- }
- } /* scale machine type */
- } /* Little Endian */
+ if (hdf_check_nt(ntstring, &temptype) == FAIL)
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
+
+ /* restore scaletype */
+ scaletypes[i] = temptype;
}
sddRef = tmpRef; /* prepare for a new dim var */
if (Hendaccess(aid1) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_CANTENDACCESS, FAIL);
}
break;
@@ -536,23 +524,19 @@ hdf_read_ndgs(NC *handle)
len = Hlength(handle->hdf_file, DFTAG_SDC, tmpRef);
if (len == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
coordbuf = (uint8 *) HDmalloc((uint32) len + 1);
if (NULL == coordbuf)
{
- HERROR(DFE_NOSPACE);
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_NOSPACE, FAIL);
}
if (Hgetelement(handle->hdf_file, DFTAG_SDC, tmpRef, coordbuf) == FAIL)
{
HDfreespace((VOIDP)coordbuf);
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_GETELEM, FAIL);
}
coordbuf[len] = '\0';
@@ -563,8 +547,7 @@ hdf_read_ndgs(NC *handle)
NC_CHAR, HDstrlen(coordbuf), coordbuf);
if (NULL == attrs[current_attr])
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
else
attrs[current_attr++]->HDFtype = DFNT_CHAR;
@@ -589,8 +572,7 @@ hdf_read_ndgs(NC *handle)
*/
if (Hgetelement(handle->hdf_file, tmpTag, tmpRef, ptbuf) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_GETELEM, FAIL);
}
if (Hlength(handle->hdf_file, tmpTag, tmpRef) == 36)
@@ -601,8 +583,7 @@ hdf_read_ndgs(NC *handle)
(VOIDP) tBuf,
DFNT_FLOAT64, 4, DFACC_READ, 0, 0))
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_BADCONV, FAIL);
}
attrs[current_attr] =
@@ -613,8 +594,7 @@ hdf_read_ndgs(NC *handle)
if (NULL == attrs[current_attr])
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
else
attrs[current_attr++]->HDFtype = DFNT_FLOAT64;
@@ -627,8 +607,7 @@ hdf_read_ndgs(NC *handle)
if (NULL == attrs[current_attr])
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
else
attrs[current_attr++]->HDFtype = DFNT_FLOAT64;
@@ -641,8 +620,7 @@ hdf_read_ndgs(NC *handle)
if (NULL == attrs[current_attr])
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
else
attrs[current_attr++]->HDFtype = DFNT_FLOAT64;
@@ -655,8 +633,7 @@ hdf_read_ndgs(NC *handle)
if (NULL == attrs[current_attr])
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
else
attrs[current_attr++]->HDFtype = DFNT_FLOAT64;
@@ -666,8 +643,7 @@ hdf_read_ndgs(NC *handle)
(VOIDP) tBuf,
DFNT_INT32, 1, DFACC_READ, 0,0))
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_BADCONV, FAIL);
}
@@ -679,8 +655,7 @@ hdf_read_ndgs(NC *handle)
if (NULL == attrs[current_attr])
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
else
attrs[current_attr++]->HDFtype = DFNT_INT32;
@@ -694,8 +669,7 @@ hdf_read_ndgs(NC *handle)
(VOIDP)tBuf,
DFNT_FLOAT32, 4, DFACC_READ, 0, 0))
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_BADCONV, FAIL);
}
@@ -707,8 +681,7 @@ hdf_read_ndgs(NC *handle)
if (NULL == attrs[current_attr])
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
else
attrs[current_attr++]->HDFtype = DFNT_FLOAT32;
@@ -721,8 +694,7 @@ hdf_read_ndgs(NC *handle)
if (NULL == attrs[current_attr])
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
else
attrs[current_attr++]->HDFtype = DFNT_FLOAT32;
@@ -735,8 +707,7 @@ hdf_read_ndgs(NC *handle)
if (NULL == attrs[current_attr])
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
else
attrs[current_attr++]->HDFtype = DFNT_FLOAT32;
@@ -749,8 +720,7 @@ hdf_read_ndgs(NC *handle)
if (NULL == attrs[current_attr])
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
else
attrs[current_attr++]->HDFtype = DFNT_FLOAT32;
@@ -760,8 +730,7 @@ hdf_read_ndgs(NC *handle)
(VOIDP) tBuf,
DFNT_INT16, 1, DFACC_READ, 0,0))
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_BADCONV, FAIL);
}
@@ -773,8 +742,7 @@ hdf_read_ndgs(NC *handle)
if (NULL == attrs[current_attr])
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
else
attrs[current_attr++]->HDFtype = DFNT_INT16;
@@ -786,16 +754,14 @@ hdf_read_ndgs(NC *handle)
if (Hgetelement(handle->hdf_file, tmpTag, tmpRef, ptbuf) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_GETELEM, FAIL);
}
if (FAIL == DFKconvert((VOIDP)ptbuf,
(VOIDP)tBuf,
HDFtype, 2, DFACC_READ, 0, 0))
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_BADCONV, FAIL);
}
@@ -807,8 +773,7 @@ hdf_read_ndgs(NC *handle)
if (NULL == attrs[current_attr])
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
else
attrs[current_attr++]->HDFtype = HDFtype;
@@ -821,8 +786,7 @@ hdf_read_ndgs(NC *handle)
if (NULL == attrs[current_attr])
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
else
attrs[current_attr++]->HDFtype = HDFtype;
@@ -834,8 +798,7 @@ hdf_read_ndgs(NC *handle)
if (Hgetelement(handle->hdf_file, tmpTag, tmpRef, ptbuf) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_GETELEM, FAIL);
}
else
{
@@ -853,8 +816,7 @@ hdf_read_ndgs(NC *handle)
if (hdf_register_seen_sdg(sdgRef) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
}
@@ -864,6 +826,13 @@ hdf_read_ndgs(NC *handle)
break;
} /* end switch 'tmpTag */
} /* end while 'DFdiget()'*/
+
+ /* Free local buffer */
+ if (ptbuf != NULL)
+ {
+ HDfree(ptbuf);
+ ptbuf = NULL;
+ }
if(lRef)
{
@@ -876,22 +845,18 @@ hdf_read_ndgs(NC *handle)
len = Hlength(handle->hdf_file, DFTAG_SDL, lRef);
if(len == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
labelbuf = (uint8 *) HDmalloc((uint32) len + 3);
if(NULL == labelbuf)
{
- HERROR(DFE_NOSPACE);
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_NOSPACE, FAIL);
}
if(Hgetelement(handle->hdf_file, DFTAG_SDL, lRef, labelbuf) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_GETELEM, FAIL);
}
labelbuf[len + 2] = '\0';
@@ -909,22 +874,18 @@ hdf_read_ndgs(NC *handle)
len = Hlength(handle->hdf_file, DFTAG_SDU, uRef);
if(len == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
unitbuf = (uint8 *) HDmalloc((uint32) len+3);
if(NULL == unitbuf)
{
- HERROR(DFE_NOSPACE);
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_NOSPACE, FAIL);
}
if(Hgetelement(handle->hdf_file, DFTAG_SDU, uRef, unitbuf) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_GETELEM, FAIL);
}
unitbuf[len + 2] = '\0';
@@ -941,22 +902,18 @@ hdf_read_ndgs(NC *handle)
len = Hlength(handle->hdf_file, DFTAG_SDF, fRef);
if(len == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
formatbuf = (uint8 *) HDmalloc((uint32) len+3);
if(NULL == formatbuf)
{
- HERROR(DFE_NOSPACE);
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_NOSPACE, FAIL);
}
if(Hgetelement(handle->hdf_file, DFTAG_SDF, fRef, formatbuf) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_GETELEM, FAIL);
}
formatbuf[len + 2] = '\0';
@@ -973,22 +930,18 @@ hdf_read_ndgs(NC *handle)
len = Hlength(handle->hdf_file, DFTAG_SDS, sRef);
if(len == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
scalebuf = (uint8 *) HDmalloc((uint32) len);
if(NULL == scalebuf)
{
- HERROR(DFE_NOSPACE);
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_NOSPACE, FAIL);
}
if(Hgetelement(handle->hdf_file, DFTAG_SDS, sRef, scalebuf) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_GETELEM, FAIL);
}
}
@@ -1058,17 +1011,13 @@ hdf_read_ndgs(NC *handle)
dims = (NC_dim **) HDrealloc((VOIDP) dims, sizeof(NC_dim *) * max_thangs);
if(NULL == dims)
{
- HERROR(DFE_NOSPACE);
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_NOSPACE, FAIL);
}
vars = (NC_var **) HDrealloc((VOIDP) vars, sizeof(NC_var *) * max_thangs);
if(NULL == vars)
{
- HERROR(DFE_NOSPACE);
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_NOSPACE, FAIL);
}
}
@@ -1079,8 +1028,7 @@ hdf_read_ndgs(NC *handle)
dims[this_dim] = NC_new_dim(tmpname, dimsizes[dim]);
if (NULL == dims[this_dim])
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
/*
@@ -1106,10 +1054,9 @@ hdf_read_ndgs(NC *handle)
{
#ifdef DEBUG
/* replace it with NCAdvice or HERROR? */
- fprintf(stderr "hdf_read_ndgs: hdf_unmap_type failed for %d\n", scaletypes[dim]);
+ fprintf(stderr, "hdf_read_ndgs: hdf_unmap_type failed for %d\n", scaletypes[dim]);
#endif
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
vars[current_var] = NC_new_var(tmpname,
@@ -1118,8 +1065,7 @@ hdf_read_ndgs(NC *handle)
&this_dim);
if (NULL == vars[current_var])
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
vars[current_var]->data_tag = DFTAG_SDS; /* not normal data */
@@ -1169,8 +1115,7 @@ hdf_read_ndgs(NC *handle)
if (NULL == dimattrs[dimattrcnt])
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
else
dimattrs[dimattrcnt++]->HDFtype = DFNT_CHAR;
@@ -1186,8 +1131,7 @@ hdf_read_ndgs(NC *handle)
if (NULL == dimattrs[dimattrcnt])
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
else
dimattrs[dimattrcnt++]->HDFtype = DFNT_CHAR;
@@ -1203,8 +1147,7 @@ hdf_read_ndgs(NC *handle)
if (NULL == dimattrs[dimattrcnt])
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
else
dimattrs[dimattrcnt++]->HDFtype = DFNT_CHAR;
@@ -1220,8 +1163,7 @@ hdf_read_ndgs(NC *handle)
(Void *) dimattrs);
if (NULL == vars[current_var]->attrs)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
}
else
@@ -1236,19 +1178,11 @@ hdf_read_ndgs(NC *handle)
dims = (NC_dim **) HDrealloc((VOIDP)dims, sizeof(NC_dim *) * max_thangs);
if(NULL == dims)
- {
- HERROR(DFE_NOSPACE);
- ret_value = FAIL;
- goto done;
- }
+ HGOTO_ERROR(DFE_NOSPACE, FAIL);
vars = (NC_var **) HDrealloc((VOIDP)vars, sizeof(NC_var *) * max_thangs);
if(NULL == vars)
- {
- HERROR(DFE_NOSPACE);
- ret_value = FAIL;
- goto done;
- }
+ HGOTO_ERROR(DFE_NOSPACE, FAIL);
}
} /* end if 'new_dim' */
} /* end for 'dim' */
@@ -1283,8 +1217,7 @@ hdf_read_ndgs(NC *handle)
vars[current_var] = NC_new_var(tmpname, type, (int) rank, vardims);
if (NULL == vars[current_var])
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
#if 0
@@ -1300,7 +1233,8 @@ hdf_read_ndgs(NC *handle)
/* Indicate that it is unknown whether the current variable
is an SDS or a coordinate variable. bugzilla 624 - BMR -
- 05/16/2007 */
+ 05/16/2007. This looks like an IS_CRDVAR because it's from
+ vardim! -BMR - 6/1/16 */
vars[current_var]->var_type = UNKNOWN;
@@ -1335,8 +1269,7 @@ hdf_read_ndgs(NC *handle)
/* start Annotation inteface */
if ((an_handle = ANstart(handle->hdf_file)) == FAIL)
{
- ret_value = FAIL;
- goto done_adesc;
+ HGOTO_ADESC_ERROR(DFE_ANAPIERROR, FAIL);
}
/* Get number of data descs with this tag/ref */
@@ -1352,8 +1285,7 @@ hdf_read_ndgs(NC *handle)
#ifdef AN_DEBUG
fprintf(stderr,"failed to allocate space for %d descs \n", num_ddescs);
#endif
- ret_value = FAIL;
- goto done_adesc;
+ HGOTO_ADESC_ERROR(DFE_NOSPACE, FAIL);
}
/* get list of desc annotations id's with this tag/ref */
@@ -1362,8 +1294,7 @@ hdf_read_ndgs(NC *handle)
#ifdef AN_DEBUG
fprintf(stderr,"failed to get %d descs list \n", num_ddescs);
#endif
- ret_value = FAIL;
- goto done_adesc;
+ HGOTO_ADESC_ERROR(DFE_ANAPIERROR, FAIL);
}
/* loop through desc list. */
@@ -1374,8 +1305,7 @@ hdf_read_ndgs(NC *handle)
#ifdef AN_DEBUG
fprintf(stderr,"failed to get %d desc length \n", i);
#endif
- ret_value = FAIL;
- goto done_adesc;
+ HGOTO_ADESC_ERROR(DFE_ANAPIERROR, FAIL);
}
/* allocate space for desc */
@@ -1386,8 +1316,7 @@ hdf_read_ndgs(NC *handle)
#ifdef AN_DEBUG
fprintf(stderr,"failed to allocate space for desc %d \n", i);
#endif
- ret_value = FAIL;
- goto done_adesc;
+ HGOTO_ADESC_ERROR(DFE_NOSPACE, FAIL);
}
HDmemset(ann_desc,'\0', ann_len+1);
}
@@ -1398,8 +1327,7 @@ hdf_read_ndgs(NC *handle)
#ifdef AN_DEBUG
fprintf(stderr,"failed to read %d desc \n", i);
#endif
- ret_value = FAIL;
- goto done_adesc;
+ HGOTO_ADESC_ERROR(DFE_ANAPIERROR, FAIL);
}
/* make unique attribute */
@@ -1414,8 +1342,7 @@ hdf_read_ndgs(NC *handle)
if (NULL == attrs[current_attr])
{
- ret_value = FAIL;
- goto done_adesc;
+ HGOTO_ADESC_ERROR(DFE_INTERNAL, FAIL);
}
else
attrs[current_attr++]->HDFtype = DFNT_CHAR;
@@ -1588,8 +1515,7 @@ hdf_read_ndgs(NC *handle)
if (NULL == attrs[current_attr])
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
else
attrs[current_attr++]->HDFtype = DFNT_CHAR;
@@ -1608,8 +1534,7 @@ hdf_read_ndgs(NC *handle)
if (NULL == attrs[current_attr])
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
else
attrs[current_attr++]->HDFtype = DFNT_CHAR;
@@ -1631,8 +1556,7 @@ hdf_read_ndgs(NC *handle)
if (NULL == attrs[current_attr])
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
else
attrs[current_attr++]->HDFtype = DFNT_CHAR;
@@ -1650,8 +1574,7 @@ hdf_read_ndgs(NC *handle)
if (NULL == vars[current_var]->attrs)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
}
else
@@ -1669,17 +1592,13 @@ hdf_read_ndgs(NC *handle)
dims = (NC_dim **) HDrealloc((VOIDP) dims, sizeof(NC_dim *) * max_thangs);
if(NULL == dims)
{
- HERROR(DFE_NOSPACE);
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_NOSPACE, FAIL);
}
vars = (NC_var **) HDrealloc((VOIDP) vars, sizeof(NC_var *) * max_thangs);
if(NULL == vars)
{
- HERROR(DFE_NOSPACE);
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_NOSPACE, FAIL);
}
}
@@ -1711,8 +1630,7 @@ hdf_read_ndgs(NC *handle)
if (Hendaccess(aid) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_CANTENDACCESS, FAIL);
}
/*
@@ -1723,8 +1641,7 @@ hdf_read_ndgs(NC *handle)
handle->dims = NC_new_array(NC_DIMENSION, current_dim, (Void *) dims);
if (NULL == handle->dims)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
}
else
@@ -1735,8 +1652,7 @@ hdf_read_ndgs(NC *handle)
handle->vars = NC_new_array(NC_VARIABLE, current_var, (Void *) vars);
if (NULL == handle->vars)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
}
else
@@ -1761,6 +1677,8 @@ done:
HDfree((VOIDP)vardims);
if (scaletypes != NULL)
HDfree((VOIDP)scaletypes);
+ if (ptbuf != NULL)
+ HDfree(ptbuf);
}
/* Normal cleanup */
@@ -1774,7 +1692,6 @@ done:
return ret_value;
} /* hdf_read_ndgs */
-
/******************************************************************************
NAME
hdf_read_sds_cdf
@@ -1792,6 +1709,7 @@ int
hdf_read_sds_cdf(XDR *xdrs,
NC **handlep)
{
+ CONSTR(FUNC, "hdf_read_sds_cdf"); /* for HERROR */
int32 status;
NC *handle = NULL;
intn ret_value = SUCCEED;
@@ -1810,15 +1728,13 @@ hdf_read_sds_cdf(XDR *xdrs,
handle = (*handlep);
if(NULL == handle)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
status = hdf_read_ndgs(handle);
if(status == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
/* deallocate SDG-NDG space */
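The bulk of the hdfsds.c changes replace open-coded "ret_value = FAIL; goto done;" sequences with HGOTO_ERROR()/HGOTO_ADESC_ERROR() so that an error code is pushed as well. A minimal sketch of that goto-cleanup idiom, assuming the macro bodies follow the shape of the HGOTO_ADESC_ERROR definition added above; names ending in _sketch are illustrative, not the HDF4 macros themselves.

    #include <stdlib.h>

    #define HERROR_SKETCH(err)           ((void)(err))   /* stand-in for HERROR(): record error code */
    #define HGOTO_ERROR_SKETCH(err, rv)  { HERROR_SKETCH(err); ret_value = rv; goto done; }

    static int read_something_sketch(void)
    {
        int   ret_value = 0;                /* SUCCEED */
        char *buffer    = NULL;

        buffer = (char *)malloc(64);
        if (buffer == NULL)
            HGOTO_ERROR_SKETCH(-1 /* e.g. DFE_NOSPACE */, -1 /* FAIL */);

        /* ... use buffer ... */

    done:
        if (ret_value == -1)
          { /* error-only cleanup would go here */
          }
        /* normal cleanup runs on both paths */
        if (buffer != NULL)
            free(buffer);
        return ret_value;
    }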
diff --git a/mfhdf/libsrc/local_nc.h b/mfhdf/libsrc/local_nc.h
index 10609f7..c98df38 100644
--- a/mfhdf/libsrc/local_nc.h
+++ b/mfhdf/libsrc/local_nc.h
@@ -14,26 +14,26 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-/* $Id: local_nc.h 6112 2014-06-04 21:05:09Z byrn $ */
+/* $Id: local_nc.h 6450 2016-06-16 06:38:24Z bmribler $ */
#ifndef _LOCAL_NC_
#define _LOCAL_NC_
#include "H4api_adpt.h"
/*
- * netcdf library 'private' data structures, objects and interfaces
+ * netcdf library 'private' data structures, objects and interfaces
*/
-#include <stddef.h> /* size_t */
-#include <stdio.h> /* FILENAME_MAX */
+#include <stddef.h> /* size_t */
+#include <stdio.h> /* FILENAME_MAX */
#ifndef FILENAME_MAX
#define FILENAME_MAX 255
#endif
/* Do we have system XDR files */
-#ifndef NO_SYS_XDR_INC
-#include <rpc/types.h>
-#include <rpc/xdr.h>
+#ifndef NO_SYS_XDR_INC
+#include <rpc/types.h>
+#include <rpc/xdr.h>
#else /* NO_SYS_XDR_INC */
#include <types.h> /* <types.h */
#include <xdr.h> /* <xdr.h> */
@@ -41,7 +41,7 @@
#include "H4api_adpt.h"
#ifdef H4_HAVE_NETCDF
-#include "netcdf.h" /* needed for defs of nc_type, ncvoid, ... */
+#include "netcdf.h" /* needed for defs of nc_type, ncvoid, ... */
#else
#include "hdf4_netcdf.h"
#endif
@@ -70,7 +70,7 @@
#define VARIABLE "Var0.0"
#define DIMENSION "Dim0.0"
#define UDIMENSION "UDim0.0"
-#define DIM_VALS "DimVal0.0"
+#define DIM_VALS "DimVal0.0"
#define DIM_VALS01 "DimVal0.1"
#define CDF "CDF0.0"
/* DATA is defined in DTM. Change DATA to DATA0 *
@@ -107,19 +107,19 @@ typedef struct vix_t_def {
/* like, a discriminated union in the sense of xdr */
typedef struct {
- nc_type type ; /* the discriminant */
- size_t len ; /* the total length originally allocated */
- size_t szof ; /* sizeof each value */
- unsigned count ; /* length of the array */
- Void *values ; /* the actual data */
+ nc_type type ; /* the discriminant */
+ size_t len ; /* the total length originally allocated */
+ size_t szof ; /* sizeof each value */
+ unsigned count ; /* length of the array */
+ Void *values ; /* the actual data */
} NC_array ;
/* Counted string for names and such */
-/*
+/*
count is the actual size of the buffer for the string
len is the length of the string in the buffer
-
+
count != len when a string is resized to something smaller
*/
@@ -128,55 +128,55 @@ typedef struct {
#endif /* HDF */
typedef struct {
- unsigned count ;
- unsigned len ;
+ unsigned count ;
+ unsigned len ;
#ifdef HDF
uint32 hash; /* [non-perfect] hash value for faster comparisons */
#endif /* HDF */
- char *values ;
+ char *values ;
} NC_string ;
/* Counted array of ints for assoc list */
typedef struct {
- unsigned count ;
- int *values ;
+ unsigned count ;
+ int *values ;
} NC_iarray ;
/* NC dimension stucture */
typedef struct {
- NC_string *name ;
+ NC_string *name ;
long size ;
#ifdef HDF
int32 dim00_compat; /* compatible with Dim0.0 */
- int32 vgid; /* id of the Vgroup representing this dimension */
+ int32 vgid; /* id of the Vgroup representing this dimension */
int32 count; /* Number of pointers to this dimension */
#endif
} NC_dim ;
/* NC attribute */
typedef struct {
- NC_string *name ;
- NC_array *data ;
+ NC_string *name ;
+ NC_array *data ;
#ifdef HDF
- int32 HDFtype; /* it should be in NC_array *data. However, */
+ int32 HDFtype; /* it should be in NC_array *data. However, */
/* NC.dims and NC.vars are NC_array too. */
#endif
} NC_attr ;
typedef struct {
- char path[FILENAME_MAX + 1] ;
- unsigned flags ;
- XDR *xdrs ;
- long begin_rec ; /* (off_t) postion of the first 'record' */
- unsigned long recsize ; /* length of 'record' */
- int redefid ;
- /* below gets xdr'd */
- unsigned long numrecs ; /* number of 'records' allocated */
- NC_array *dims ;
- NC_array *attrs ;
- NC_array *vars ;
+ char path[FILENAME_MAX + 1] ;
+ unsigned flags ;
+ XDR *xdrs ;
+ long begin_rec ; /* (off_t) postion of the first 'record' */
+ unsigned long recsize ; /* length of 'record' */
+ int redefid ;
+ /* below gets xdr'd */
+ unsigned long numrecs ; /* number of 'records' allocated */
+ NC_array *dims ;
+ NC_array *attrs ;
+ NC_array *vars ;
#ifdef HDF
- int32 hdf_file;
+ int32 hdf_file;
int file_type;
int32 vgid;
int hdf_mode; /* mode we are attached for */
@@ -186,52 +186,52 @@ typedef struct {
/* NC variable: description and data */
typedef struct {
- NC_string *name ; /* name->values shows data set's name */
- NC_iarray *assoc ; /* user definition */
- unsigned long *shape ; /* compiled info (Each holds a dimension size. -BMR) */
- unsigned long *dsizes ; /* compiled info (Each element holds the amount of space
- needed to hold values in that dimension, e.g., first dimension
- size is 10, value type is int32=4, then dsizes[0]=4*10=40. -BMR) */
- NC_array *attrs; /* list of attribute structures */
- nc_type type ; /* the discriminant */
- unsigned long len ; /* the total length originally allocated */
- size_t szof ; /* sizeof each value */
- long begin ; /* seek index, often an off_t */
+ NC_string *name ; /* name->values shows data set's name */
+ NC_iarray *assoc ; /* user definition */
+ unsigned long *shape ; /* compiled info (Each holds a dimension size. -BMR) */
+ unsigned long *dsizes ; /* compiled info (Each element holds the amount of space
+ needed to hold values in that dimension, e.g., first dimension
+ size is 10, value type is int32=4, then dsizes[0]=4*10=40. -BMR) */
+ NC_array *attrs; /* list of attribute structures */
+ nc_type type ; /* the discriminant */
+ unsigned long len ; /* the total length originally allocated */
+ size_t szof ; /* sizeof each value */
+ long begin ; /* seek index, often an off_t */
#ifdef HDF
- NC *cdf; /* handle of the file where this var belongs to */
- int32 vgid; /* id of the variable's Vgroup */
- uint16 data_ref;/* ref of the variable's data storage (if exists), default 0 */
- uint16 data_tag;/* tag of the variable's data storage (if exists), default DATA_TAG */
- uint16 ndg_ref; /* ref of ndg for this dataset */
- hdf_vartype_t var_type; /* type of this variable, default UNKNOWN
- IS_SDSVAR == this var is an SDS variable
- IS_CRDVAR == this var is a coordinate variable
- UNKNOWN == because the var was created prior to this distinction.
- This is to distinguish b/w a one-dim data set and a coord var of the same name.
- It's less riskier than using a flag and change the file format, I think. -BMR */
- intn data_offset; /* non-traditional data may not begin at 0 */
- int32 block_size; /* size of the blocks for unlimited dim. datasets, default -1 */
- int numrecs; /* number of records this has been filled up to, for unlimited dim */
- int32 aid; /* aid for DFTAG_SD data */
- int32 HDFtype; /* type of this variable as HDF thinks */
- int32 HDFsize; /* size of this variable as HDF thinks */
+ NC *cdf; /* handle of the file where this var belongs to */
+ int32 vgid; /* id of the variable's Vgroup */
+ uint16 data_ref;/* ref of the variable's data storage (if exists), default 0 */
+ uint16 data_tag;/* tag of the variable's data storage (if exists), default DATA_TAG */
+ uint16 ndg_ref; /* ref of ndg for this dataset */
+ hdf_vartype_t var_type; /* type of this variable, default UNKNOWN
+ IS_SDSVAR == this var is an SDS variable
+ IS_CRDVAR == this var is a coordinate variable
+ UNKNOWN == because the var was created prior to this distinction.
+ This is to distinguish b/w a one-dim data set and a coord var of the same name.
+ It's less riskier than using a flag and change the file format, I think. -BMR */
+ intn data_offset; /* non-traditional data may not begin at 0 */
+ int32 block_size; /* size of the blocks for unlimited dim. datasets, default -1 */
+ int numrecs; /* number of records this has been filled up to, for unlimited dim */
+ int32 aid; /* aid for DFTAG_SD data */
+ int32 HDFtype; /* type of this variable as HDF thinks */
+ int32 HDFsize; /* size of this variable as HDF thinks */
/* These next two flags control when space in the file is allocated
for a new dataset. They are used (currently) in SDwritedata() and
hdf_get_vp_aid() to allocate the full length of a new fixed-size dataset
which is not writing fill values, instead of letting them get created
as an "appendable" dataset and probably get converted into a linked-
block special element when they don't need to be one */
- int32 created; /* BOOLEAN == is newly created */
- int32 set_length; /* BOOLEAN == needs length set */
- int32 is_ragged; /* BOOLEAN == is a ragged array */
- int32 * rag_list; /* size of ragged array lines */
- int32 rag_fill; /* last line in rag_list to be set */
- vix_t * vixHead; /* list of VXR records for CDF data storage */
+ int32 created; /* BOOLEAN == is newly created */
+ int32 set_length; /* BOOLEAN == needs length set */
+ int32 is_ragged; /* BOOLEAN == is a ragged array */
+ int32 * rag_list; /* size of ragged array lines */
+ int32 rag_fill; /* last line in rag_list to be set */
+ vix_t * vixHead; /* list of VXR records for CDF data storage */
#endif
} NC_var ;
#define IS_RECVAR(vp) \
- ((vp)->shape != NULL ? (*(vp)->shape == NC_UNLIMITED) : 0 )
+ ((vp)->shape != NULL ? (*(vp)->shape == NC_UNLIMITED) : 0 )
#define netCDF_FILE 0
#define HDF_FILE 1
@@ -239,20 +239,33 @@ typedef struct {
HDFLIBAPI const char *cdf_routine_name ; /* defined in lerror.c */
- /* C D F 1 */
-#define NCMAGIC 0x43444601
- /* C D L 1 */
-#define NCLINKMAGIC 0x43444c01
+#define MAGICOFFSET 0 /* Offset where format version number is written */
+
+/* Format version number for CDF file */
+/* Written twice at the beginning of pre-2.6 CDF file */
+#define CDFMAGIC 0x0000FFFF
+
+/* Format version number for HDF file */
+#define HDFXMAGIC 0x0e031301 /* ^N^C^S^A */
+
+/* Format version number for netCDF classic file */
+#define NCMAGIC 0x43444601 /* C D F 1 */
+
+/* Format version number for 64-bit offset file */
+#define NCMAGIC64 0x43444602 /* C D F 2 */
+
+/* Format version number for link file */
+#define NCLINKMAGIC 0x43444c01 /* C D L 1 */
/* #ifndef HDF *//* HDF has already worked out if we have prototypes */
#ifdef HDF
#define PROTOTYPE
#endif
#undef PROTO
-#ifndef NO_HAVE_PROTOTYPES
-# define PROTO(x) x
+#ifndef NO_HAVE_PROTOTYPES
+# define PROTO(x) x
#else
-# define PROTO(x) ()
+# define PROTO(x) ()
#endif
/* #endif */ /* HDF */
@@ -261,7 +274,7 @@ extern "C" {
#endif
/* If using the real netCDF library and API (when --disable-netcdf configure flag is used)
- need to mangle the HDF versions of netCDF API function names
+ need to mangle the HDF versions of netCDF API function names
to not conflict w/ oriinal netCDF ones */
#ifndef H4_HAVE_NETCDF
#define nc_serror HNAME(nc_serror)
@@ -452,165 +465,165 @@ HDFFCLIBAPI int nncsfil
PROTO((int* cdfid, int* fillmode, int* rcode));
#endif
-HDFLIBAPI void nc_serror PROTO((
- const char *fmt,
- ...
+HDFLIBAPI void nc_serror PROTO((
+ const char *fmt,
+ ...
)) ;
-HDFLIBAPI void NCadvise PROTO((
- int err,
- const char *fmt,
- ...
+HDFLIBAPI void NCadvise PROTO((
+ int err,
+ const char *fmt,
+ ...
)) ;
-HDFLIBAPI int NC_computeshapes PROTO((
- NC *handle
+HDFLIBAPI int NC_computeshapes PROTO((
+ NC *handle
));
-HDFLIBAPI int NC_xtypelen PROTO((
- nc_type type
+HDFLIBAPI int NC_xtypelen PROTO((
+ nc_type type
));
-HDFLIBAPI int NC_xlen_array PROTO((
- NC_array *array
+HDFLIBAPI int NC_xlen_array PROTO((
+ NC_array *array
));
-HDFLIBAPI int NC_xlen_attr PROTO((
- NC_attr **app
+HDFLIBAPI int NC_xlen_attr PROTO((
+ NC_attr **app
));
-HDFLIBAPI int NC_xlen_cdf PROTO((
- NC *cdf
+HDFLIBAPI int NC_xlen_cdf PROTO((
+ NC *cdf
));
-HDFLIBAPI int NC_xlen_dim PROTO((
- NC_dim **dpp
+HDFLIBAPI int NC_xlen_dim PROTO((
+ NC_dim **dpp
));
-HDFLIBAPI int NC_xlen_iarray PROTO((
- NC_iarray *iarray
+HDFLIBAPI int NC_xlen_iarray PROTO((
+ NC_iarray *iarray
));
-HDFLIBAPI int NC_xlen_string PROTO((
- NC_string *cdfstr
+HDFLIBAPI int NC_xlen_string PROTO((
+ NC_string *cdfstr
));
-HDFLIBAPI int NC_xlen_var PROTO((
- NC_var **vpp
+HDFLIBAPI int NC_xlen_var PROTO((
+ NC_var **vpp
));
-HDFLIBAPI char *NCmemset PROTO((
- char *s,
- int c,
- int n
+HDFLIBAPI char *NCmemset PROTO((
+ char *s,
+ int c,
+ int n
));
-HDFLIBAPI void NC_arrayfill PROTO((
- void *lo,
- size_t len,
- nc_type type
+HDFLIBAPI void NC_arrayfill PROTO((
+ void *lo,
+ size_t len,
+ nc_type type
));
-HDFLIBAPI void NC_copy_arrayvals PROTO((
- char *target,
- NC_array *array
+HDFLIBAPI void NC_copy_arrayvals PROTO((
+ char *target,
+ NC_array *array
));
-HDFLIBAPI int NC_free_array PROTO((
- NC_array *array
+HDFLIBAPI int NC_free_array PROTO((
+ NC_array *array
));
-HDFLIBAPI int NC_free_attr PROTO((
- NC_attr *attr
+HDFLIBAPI int NC_free_attr PROTO((
+ NC_attr *attr
));
-HDFLIBAPI int NC_free_cdf PROTO((
- NC *handle
+HDFLIBAPI int NC_free_cdf PROTO((
+ NC *handle
));
-HDFLIBAPI int NC_free_dim PROTO((
- NC_dim *dim
+HDFLIBAPI int NC_free_dim PROTO((
+ NC_dim *dim
));
-HDFLIBAPI int NC_free_iarray PROTO((
- NC_iarray *iarray
+HDFLIBAPI int NC_free_iarray PROTO((
+ NC_iarray *iarray
));
-HDFLIBAPI int NC_free_string PROTO((
- NC_string *cdfstr
+HDFLIBAPI int NC_free_string PROTO((
+ NC_string *cdfstr
));
-HDFLIBAPI int NC_free_var PROTO((
- NC_var *var
+HDFLIBAPI int NC_free_var PROTO((
+ NC_var *var
));
-HDFLIBAPI Void *NC_incr_array PROTO((
- NC_array *array,
- Void *tail
+HDFLIBAPI Void *NC_incr_array PROTO((
+ NC_array *array,
+ Void *tail
));
HDFLIBAPI int NC_dimid PROTO((
NC *handle,
char *name
));
-HDFLIBAPI bool_t NCcktype PROTO((
- nc_type datatype
+HDFLIBAPI bool_t NCcktype PROTO((
+ nc_type datatype
));
-HDFLIBAPI bool_t NC_indefine PROTO((
- int cdfid,
- bool_t iserr
+HDFLIBAPI bool_t NC_indefine PROTO((
+ int cdfid,
+ bool_t iserr
));
-HDFLIBAPI bool_t xdr_cdf PROTO((
- XDR *xdrs,
- NC **handlep
+HDFLIBAPI bool_t xdr_cdf PROTO((
+ XDR *xdrs,
+ NC **handlep
));
-HDFLIBAPI bool_t xdr_numrecs PROTO((
- XDR *xdrs,
- NC *handle
+HDFLIBAPI bool_t xdr_numrecs PROTO((
+ XDR *xdrs,
+ NC *handle
));
-HDFLIBAPI bool_t xdr_shorts PROTO((
- XDR *xdrs,
- short *sp,
- u_int cnt
+HDFLIBAPI bool_t xdr_shorts PROTO((
+ XDR *xdrs,
+ short *sp,
+ u_int cnt
));
-HDFLIBAPI bool_t xdr_NC_array PROTO((
- XDR *xdrs,
- NC_array **app
+HDFLIBAPI bool_t xdr_NC_array PROTO((
+ XDR *xdrs,
+ NC_array **app
));
-HDFLIBAPI bool_t xdr_NC_attr PROTO((
- XDR *xdrs,
- NC_attr **app
+HDFLIBAPI bool_t xdr_NC_attr PROTO((
+ XDR *xdrs,
+ NC_attr **app
));
-HDFLIBAPI bool_t xdr_NC_dim PROTO((
- XDR *xdrs,
- NC_dim **dpp
+HDFLIBAPI bool_t xdr_NC_dim PROTO((
+ XDR *xdrs,
+ NC_dim **dpp
));
-HDFLIBAPI bool_t xdr_NC_fill PROTO((
- XDR *xdrs,
- NC_var *vp
+HDFLIBAPI bool_t xdr_NC_fill PROTO((
+ XDR *xdrs,
+ NC_var *vp
));
-HDFLIBAPI bool_t xdr_NC_iarray PROTO((
- XDR *xdrs,
- NC_iarray **ipp
+HDFLIBAPI bool_t xdr_NC_iarray PROTO((
+ XDR *xdrs,
+ NC_iarray **ipp
));
-HDFLIBAPI bool_t xdr_NC_string PROTO((
- XDR *xdrs,
- NC_string **spp
+HDFLIBAPI bool_t xdr_NC_string PROTO((
+ XDR *xdrs,
+ NC_string **spp
));
-HDFLIBAPI bool_t xdr_NC_var PROTO((
- XDR *xdrs,
- NC_var **vpp
+HDFLIBAPI bool_t xdr_NC_var PROTO((
+ XDR *xdrs,
+ NC_var **vpp
));
-HDFLIBAPI size_t NC_typelen PROTO((
- nc_type type
+HDFLIBAPI size_t NC_typelen PROTO((
+ nc_type type
));
-HDFLIBAPI NC *NC_check_id PROTO((
- int cdfid
+HDFLIBAPI NC *NC_check_id PROTO((
+ int cdfid
));
-HDFLIBAPI NC *NC_dup_cdf PROTO((
+HDFLIBAPI NC *NC_dup_cdf PROTO((
const char *name,
- int mode,
- NC *old
+ int mode,
+ NC *old
));
-HDFLIBAPI NC *NC_new_cdf PROTO((
+HDFLIBAPI NC *NC_new_cdf PROTO((
const char *name,
- int mode
+ int mode
));
-HDFLIBAPI NC_array *NC_new_array PROTO((
- nc_type type,
- unsigned count,
- const void *values
+HDFLIBAPI NC_array *NC_new_array PROTO((
+ nc_type type,
+ unsigned count,
+ const void *values
));
-HDFLIBAPI NC_array *NC_re_array PROTO((
- NC_array *old,
- nc_type type,
- unsigned count,
- const void *values
+HDFLIBAPI NC_array *NC_re_array PROTO((
+ NC_array *old,
+ nc_type type,
+ unsigned count,
+ const void *values
));
HDFLIBAPI NC_attr *NC_new_attr PROTO((
const char *name,
@@ -618,58 +631,58 @@ HDFLIBAPI NC_attr *NC_new_attr PROTO((
unsigned count ,
const void *values
));
-HDFLIBAPI NC_attr **NC_findattr PROTO((
- NC_array **ap,
- const char *name
-));
-HDFLIBAPI NC_dim *NC_new_dim PROTO((
- const char *name,
- long size
-));
-HDFLIBAPI NC_iarray *NC_new_iarray PROTO((
- unsigned count,
- const int values[]
-));
-HDFLIBAPI NC_string *NC_new_string PROTO((
- unsigned count,
- const char *str
-));
-HDFLIBAPI NC_string *NC_re_string PROTO((
- NC_string *old,
- unsigned count,
- const char *str
-));
-HDFLIBAPI NC_var *NC_hlookupvar PROTO((
- NC *handle,
- int varid
-));
-HDFLIBAPI NC_var *NC_new_var PROTO((
- const char *name,
- nc_type type,
- int ndims,
- const int *dims
-));
-HDFLIBAPI int NCvario PROTO((
- NC *handle,
- int varid,
- const long *start,
- const long *edges,
- void *values
-));
-HDFLIBAPI bool_t NCcoordck PROTO((
- NC *handle,
- NC_var *vp,
- const long *coords
+HDFLIBAPI NC_attr **NC_findattr PROTO((
+ NC_array **ap,
+ const char *name
+));
+HDFLIBAPI NC_dim *NC_new_dim PROTO((
+ const char *name,
+ long size
+));
+HDFLIBAPI NC_iarray *NC_new_iarray PROTO((
+ unsigned count,
+ const int values[]
+));
+HDFLIBAPI NC_string *NC_new_string PROTO((
+ unsigned count,
+ const char *str
+));
+HDFLIBAPI NC_string *NC_re_string PROTO((
+ NC_string *old,
+ unsigned count,
+ const char *str
+));
+HDFLIBAPI NC_var *NC_hlookupvar PROTO((
+ NC *handle,
+ int varid
+));
+HDFLIBAPI NC_var *NC_new_var PROTO((
+ const char *name,
+ nc_type type,
+ int ndims,
+ const int *dims
+));
+HDFLIBAPI int NCvario PROTO((
+ NC *handle,
+ int varid,
+ const long *start,
+ const long *edges,
+ void *values
+));
+HDFLIBAPI bool_t NCcoordck PROTO((
+ NC *handle,
+ NC_var *vp,
+ const long *coords
));
HDFLIBAPI bool_t xdr_NCvshort PROTO((
XDR *xdrs,
unsigned which,
short *values
));
-HDFLIBAPI bool_t NC_dcpy PROTO((
- XDR *target,
- XDR *source,
- long nbytes
+HDFLIBAPI bool_t NC_dcpy PROTO((
+ XDR *target,
+ XDR *source,
+ long nbytes
));
HDFLIBAPI int NCxdrfile_sync
PROTO((XDR *xdrs));
@@ -791,6 +804,9 @@ HDFLIBAPI intn HDiscdf
HDFLIBAPI intn HDisnetcdf
(const char *filename);
+HDFLIBAPI intn HDisnetcdf64
+ (const char *filename);
+
#endif /* HDF */
#ifdef __cplusplus
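The constants added above spell out every signature the library distinguishes at MAGICOFFSET. A sketch of how they map to file formats; the enum and function names are illustrative, while the values are taken verbatim from the hunk above.

    #include <stdint.h>

    enum h4_format_sketch { FMT_UNKNOWN, FMT_CDF_PRE26, FMT_HDF,
                            FMT_NC_CLASSIC, FMT_NC_64BIT, FMT_NC_LINK };

    static enum h4_format_sketch classify_magic_sketch(uint32_t magic)
    {
        switch (magic) {
            case 0x0000FFFFUL: return FMT_CDF_PRE26;  /* CDFMAGIC:    pre-2.6 CDF */
            case 0x0e031301UL: return FMT_HDF;        /* HDFXMAGIC:   ^N ^C ^S ^A */
            case 0x43444601UL: return FMT_NC_CLASSIC; /* NCMAGIC:     "CDF\1"     */
            case 0x43444602UL: return FMT_NC_64BIT;   /* NCMAGIC64:   "CDF\2"     */
            case 0x43444c01UL: return FMT_NC_LINK;    /* NCLINKMAGIC: "CDL\1"     */
            default:           return FMT_UNKNOWN;
        }
    }

The new HDisnetcdf64() earlier in this patch corresponds to the FMT_NC_64BIT case.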
diff --git a/mfhdf/libsrc/mfsd.c b/mfhdf/libsrc/mfsd.c
index 5b5db7b..94dc803 100644
--- a/mfhdf/libsrc/mfsd.c
+++ b/mfhdf/libsrc/mfsd.c
@@ -11,11 +11,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 6036 $";
-#endif
-
-/* $Id: mfsd.c 6036 2014-01-20 17:28:01Z acheng $ */
+/* $Id: mfsd.c 6441 2016-06-14 21:31:36Z bmribler $ */
/******************************************************************************
file - mfsd.c
@@ -136,16 +132,14 @@ NC *
SDIhandle_from_id(int32 id, /* IN: an object (file, dim, dataset) ID */
intn typ /* IN: IN: the type of ID this is */)
{
+ CONSTR(FUNC, "SDIhandle_from_id"); /* for HGOTO_ERROR */
int32 tmp;
NC *ret_value = NULL;
/* check that it is the proper type of id */
tmp = (id >> 16) & 0x0f;
if(tmp != typ)
- {
- ret_value = NULL;
- goto done;
- }
+ HGOTO_ERROR(DFE_ARGS, NULL);
/* get the file from top 12 bits*/
tmp = (id >> 20) & 0xfff;
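For orientation, the two shifts in this hunk imply the following ID layout; this is inferred from the visible code only, and the macro names are illustrative, not part of the mfhdf API.

    #include <stdint.h>

    /* Inferred from SDIhandle_from_id(): bits 16..19 carry the object-type tag
       (compared against CDFTYPE/SDSTYPE/DIMTYPE), bits 20..31 the open-file slot. */
    #define ID_TYPE_SKETCH(id)  (((uint32_t)(id) >> 16) & 0x00f)
    #define ID_FILE_SKETCH(id)  (((uint32_t)(id) >> 20) & 0xfff)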
@@ -177,6 +171,7 @@ NC_var *
SDIget_var(NC *handle, /* IN: the handle for this file */
int32 sdsid /* IN: a dataset ID */)
{
+ CONSTR(FUNC, "SDIget_var"); /* for HGOTO_ERROR */
int32 varid;
NC_array **ap = NULL;
NC_var *ret_value = NULL;
@@ -192,10 +187,7 @@ SDIget_var(NC *handle, /* IN: the handle for this file */
ap += varid;
}
else
- {
- ret_value = NULL;
- goto done;
- }
+ HGOTO_ERROR(DFE_ARGS, NULL);
ret_value = ((NC_var *)*ap);
@@ -225,6 +217,7 @@ NC_dim *
SDIget_dim(NC *handle,/* IN: the handle for this file */
int32 id /* IN: a dimension ID */)
{
+ CONSTR(FUNC, "SDIget_dim"); /* for HGOTO_ERROR */
int32 dimindex;
NC_array **ap = NULL;
NC_dim *ret_value = NULL;
@@ -240,10 +233,7 @@ SDIget_dim(NC *handle,/* IN: the handle for this file */
ap += dimindex;
}
else
- {
- ret_value = NULL;
- goto done;
- }
+ HGOTO_ERROR(DFE_ARGS, NULL);
ret_value = ((NC_dim *)*ap);
@@ -272,7 +262,7 @@ static intn
SDIstart(void)
{
CONSTR(FUNC, "SDIstart"); /* for HGOTO_ERROR */
- intn ret_value = SUCCEED;
+ intn ret_value = SUCCEED;
/* Don't call this routine again... */
library_terminate = TRUE;
@@ -436,6 +426,7 @@ done:
intn
SDend(int32 id /* IN: file ID of file to close */)
{
+ CONSTR(FUNC, "SDend"); /* for HGOTO_ERROR */
intn cdfid;
NC *handle = NULL;
intn ret_value = SUCCEED;
@@ -455,10 +446,7 @@ SDend(int32 id /* IN: file ID of file to close */)
/* get the handle */
handle = SDIhandle_from_id(id, CDFTYPE);
if(handle == NULL)
- {
- ret_value = FAIL;
- goto done;
- }
+ HGOTO_ERROR(DFE_ARGS, FAIL);
/* make sure we can write to the file */
if(handle->flags & NC_RDWR)
@@ -470,10 +458,7 @@ SDend(int32 id /* IN: file ID of file to close */)
if(handle->flags & NC_HDIRTY)
{
if(!xdr_cdf(handle->xdrs, &handle))
- {
- ret_value = FAIL;
- goto done;
- }
+ HGOTO_ERROR(DFE_XDRERROR, FAIL);
handle->flags &= ~(NC_NDIRTY | NC_HDIRTY);
}
@@ -485,8 +470,7 @@ SDend(int32 id /* IN: file ID of file to close */)
{
if(!xdr_numrecs(handle->xdrs, handle))
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_XDRERROR, FAIL);
}
if (handle->file_type != HDF_FILE)
@@ -531,6 +515,7 @@ SDfileinfo(int32 fid, /* IN: file ID */
int32 *datasets,/* OUT: number of datasets in the file */
int32 *attrs /* OUT: number of global attributes */)
{
+ CONSTR(FUNC, "SDfileinfo"); /* for HGOTO_ERROR */
NC *handle = NULL;
intn ret_value = SUCCEED;
@@ -545,8 +530,7 @@ SDfileinfo(int32 fid, /* IN: file ID */
handle = SDIhandle_from_id(fid, CDFTYPE);
if(handle == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
#ifdef SDDEBUG
@@ -600,6 +584,7 @@ int32
SDselect(int32 fid, /* IN: file ID */
int32 index /* IN: index of dataset to get ID for */)
{
+ CONSTR(FUNC, "SDselect"); /* for HGOTO_ERROR */
NC *handle = NULL;
int32 sdsid; /* the id we're gonna build */
int32 ret_value = FAIL;
@@ -615,21 +600,18 @@ SDselect(int32 fid, /* IN: file ID */
handle = SDIhandle_from_id(fid, CDFTYPE);
if(handle == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* check that a data set with this index exists */
if(handle->vars == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
if((unsigned)index >= handle->vars->count)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* create SDS id to return */
@@ -950,6 +932,7 @@ int32
SDnametoindex(int32 fid, /* IN: file ID */
const char *name /* IN: name of dataset to search for */)
{
+ CONSTR(FUNC, "SDnametoindex"); /* for HGOTO_ERROR */
unsigned ii;
intn len;
NC *handle = NULL;
@@ -965,14 +948,12 @@ SDnametoindex(int32 fid, /* IN: file ID */
handle = SDIhandle_from_id(fid, CDFTYPE);
if(handle == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
if(handle->vars == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
len = HDstrlen(name) ;
@@ -982,8 +963,7 @@ SDnametoindex(int32 fid, /* IN: file ID */
if( len == (*dp)->name->len
&& HDstrncmp(name, (*dp)->name->values, HDstrlen(name)) == 0)
{
- ret_value = (int32)ii;
- goto done;
+ HGOTO_DONE((int32)ii);
}
}
@@ -1022,6 +1002,7 @@ SDgetnumvars_byname(int32 fid, /* IN: file ID */
const char *name, /* IN: name of dataset to search for */
int32* n_vars)
{
+ CONSTR(FUNC, "SDgetnumvars_byname"); /* for HGOTO_ERROR */
unsigned ii;
intn len;
int32 count = 0;
@@ -1040,14 +1021,12 @@ SDgetnumvars_byname(int32 fid, /* IN: file ID */
handle = SDIhandle_from_id(fid, CDFTYPE);
if(handle == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
if(handle->vars == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
len = HDstrlen(name) ;
@@ -1098,6 +1077,7 @@ SDnametoindices(int32 fid, /* IN: file ID */
const char *name, /* IN: name of dataset to search for */
hdf_varlist_t* var_list)
{
+ CONSTR(FUNC, "SDnametoindices"); /* for HGOTO_ERROR */
unsigned ii;
intn len;
NC *handle = NULL;
@@ -1116,14 +1096,12 @@ SDnametoindices(int32 fid, /* IN: file ID */
handle = SDIhandle_from_id(fid, CDFTYPE);
if(handle == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
if(handle->vars == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
len = HDstrlen(name) ;
@@ -1176,6 +1154,7 @@ SDgetrange(int32 sdsid, /* IN: dataset ID */
void * pmax, /* OUT: valid max */
void * pmin /* OUT: valid min */)
{
+ CONSTR(FUNC, "SDgetrange"); /* for HGOTO_ERROR */
NC *handle = NULL;
NC_var *var = NULL;
NC_attr **attr = NULL;
@@ -1194,15 +1173,13 @@ SDgetrange(int32 sdsid, /* IN: dataset ID */
handle = SDIhandle_from_id(sdsid, SDSTYPE);
if(handle == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
var = SDIget_var(handle, sdsid);
if(var == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
attr = (NC_attr **) NC_findattr(&(var->attrs), _HDF_ValidRange);
@@ -1223,8 +1200,7 @@ SDgetrange(int32 sdsid, /* IN: dataset ID */
#ifdef SDDEBUG
fprintf(stderr, "No dice on range info (missing at least one)\n");
#endif
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_RANGE, FAIL);
}
if(((*attr1)->HDFtype != var->HDFtype)
@@ -1233,8 +1209,7 @@ SDgetrange(int32 sdsid, /* IN: dataset ID */
#ifdef SDDEBUG
fprintf(stderr, "No dice on range info (wrong types)\n");
#endif
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_RANGE, FAIL);
}
NC_copy_arrayvals((char *)pmax, (*attr1)->data) ;
@@ -1311,6 +1286,7 @@ SDcreate(int32 fid, /* IN: file ID */
int32 rank, /* IN: rank of dataset */
int32 *dimsizes /* IN: array of dimension sizes */)
{
+ CONSTR(FUNC, "SDcreate"); /* for HGOTO_ERROR */
intn i;
NC *handle = NULL;
NC_var *var = NULL;
@@ -1334,8 +1310,7 @@ SDcreate(int32 fid, /* IN: file ID */
handle = SDIhandle_from_id(fid, CDFTYPE);
if(handle == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* fudge the name since its optional */
@@ -1361,26 +1336,22 @@ SDcreate(int32 fid, /* IN: file ID */
dims = (intn *) HDmalloc(rank * sizeof(intn));
if(dims == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_NOSPACE, FAIL);
}
if(rank > H4_MAX_VAR_DIMS)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
for(i = 0; i < rank; i++)
{
-
num = (handle->dims ? handle->dims->count : 0);
sprintf(dimname, "fakeDim%d", num);
newdim = (NC_dim *) NC_new_dim(dimname, dimsizes[i]);
if(newdim == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
if(handle->dims == NULL)
@@ -1388,16 +1359,14 @@ SDcreate(int32 fid, /* IN: file ID */
handle->dims = NC_new_array(NC_DIMENSION,(unsigned)1, (Void *)&newdim);
if(handle->dims == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
}
else
{
if( NC_incr_array(handle->dims, (Void *)&newdim) == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
}
@@ -1408,19 +1377,13 @@ SDcreate(int32 fid, /* IN: file ID */
/* create the actual variable */
if ((nctype = hdf_unmap_type((int)nt)) == FAIL)
{
-#ifdef SDDEBUG
- /* replace it with NCAdvice or HERROR? */
- fprintf(stderr "SDcreate: hdf_unmap_type failed for %d\n", nt);
-#endif
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
var = (NC_var *) NC_new_var(name, nctype, (int)rank, dims);
if(var == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
/* Set the "newly created" & "set length" flags for use in SDwritedata */
@@ -1435,8 +1398,7 @@ SDcreate(int32 fid, /* IN: file ID */
var->HDFtype = nt;
if (FAIL == (var->HDFsize = DFKNTsize(nt)))
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
var->cdf = handle; /* set cdf before calling NC_var_shape */
@@ -1463,23 +1425,20 @@ SDcreate(int32 fid, /* IN: file ID */
handle->vars = NC_new_array(NC_VARIABLE,(unsigned)1, (Void *)&var);
if(handle->vars == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
}
else
{
if(handle->vars->count >= H4_MAX_NC_VARS)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_EXCEEDMAX, FAIL);
}
else
{
if( NC_incr_array(handle->vars, (Void *)&var) == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
}
}
@@ -1487,8 +1446,7 @@ SDcreate(int32 fid, /* IN: file ID */
/* compute all of the shape information */
if(NC_var_shape(var, handle->dims) == -1)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
/* create a handle we can give back to the user */
@@ -1563,30 +1521,26 @@ SDgetdimid(int32 sdsid, /* IN: dataset ID */
handle = SDIhandle_from_id(sdsid, SDSTYPE);
if(handle == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* get the variable */
var = SDIget_var(handle, sdsid);
if(var == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* check if enough / too many dims */
if((var->assoc == NULL) || (var->assoc->count < (unsigned)number))
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* get the dim number out of the assoc array */
if (var->assoc->values == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
dimindex = var->assoc->values[number];
@@ -1625,6 +1579,7 @@ intn
SDsetdimname(int32 id, /* IN: dataset ID */
const char *name /* IN: dimension name */)
{
+ CONSTR(FUNC, "SDsetdimname"); /* for HGOTO_ERROR */
NC *handle = NULL;
NC_dim *dim = NULL;
NC_dim **dp = NULL;
@@ -1646,16 +1601,14 @@ SDsetdimname(int32 id, /* IN: dataset ID */
handle = SDIhandle_from_id(id, DIMTYPE);
if(handle == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* get the dimension structure */
dim = SDIget_dim(handle, id);
if(dim == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* check for name in use */
@@ -1672,8 +1625,7 @@ SDsetdimname(int32 id, /* IN: dataset ID */
/* so change to point to it */
if(dim->size != (*dp)->size)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_BADDIMNAME, FAIL);
}
ap = (NC_array **) handle->dims->values;
@@ -1681,8 +1633,7 @@ SDsetdimname(int32 id, /* IN: dataset ID */
NC_free_dim(dim);
(*dp)->count += 1;
(*ap) = (NC_array *) (*dp);
- ret_value = SUCCEED;
- goto done;
+ HGOTO_DONE(SUCCEED);
}
}
}
@@ -1692,8 +1643,7 @@ SDsetdimname(int32 id, /* IN: dataset ID */
new = NC_new_string((unsigned)HDstrlen(name),name);
if(new == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
dim->name = new;
@@ -1731,6 +1681,7 @@ done:
intn
SDendaccess(int32 id /* IN: dataset ID */)
{
+ CONSTR(FUNC, "SDendaccess"); /* for HGOTO_ERROR */
NC *handle;
int32 ret_value = SUCCEED;
@@ -1745,8 +1696,7 @@ SDendaccess(int32 id /* IN: dataset ID */)
handle = SDIhandle_from_id(id, SDSTYPE);
if(handle == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
#ifdef SYNC_ON_EACC
@@ -1761,8 +1711,7 @@ SDendaccess(int32 id /* IN: dataset ID */)
{
if(!xdr_cdf(handle->xdrs, &handle) )
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_XDRERROR, FAIL);
}
handle->flags &= ~(NC_NDIRTY | NC_HDIRTY);
@@ -1774,8 +1723,7 @@ SDendaccess(int32 id /* IN: dataset ID */)
{
if(!xdr_numrecs(handle->xdrs, handle) )
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_XDRERROR, FAIL);
}
handle->flags &= ~(NC_NDIRTY);
@@ -1822,6 +1770,7 @@ SDIputattr(NC_array **ap, /* IN/OUT: attribute list */
intn count,/* IN: number of attribute values */
const void * data /* IN: attribute values */)
{
+ CONSTR(FUNC, "SDIputattr"); /* for HGOTO_ERROR */
NC_attr *attr = NULL;
NC_attr **atp = NULL;
NC_attr *old = NULL;
@@ -1834,9 +1783,7 @@ SDIputattr(NC_array **ap, /* IN/OUT: attribute list */
if ((type = hdf_unmap_type((int)nt)) == FAIL)
{
- /* replace it with NCAdvice or HERROR? */
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
if(*ap == NULL)
@@ -1844,16 +1791,14 @@ SDIputattr(NC_array **ap, /* IN/OUT: attribute list */
attr = (NC_attr *) NC_new_attr(name,type,(unsigned)count,data) ;
if(attr == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
attr->HDFtype = nt; /* Add HDFtype */
*ap = NC_new_array(NC_ATTRIBUTE,(unsigned)1, (Void*)&attr) ;
if(*ap == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
}
else
@@ -1865,8 +1810,7 @@ SDIputattr(NC_array **ap, /* IN/OUT: attribute list */
if(*atp == NULL)
{
*atp = old;
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
(*atp)->HDFtype = nt; /* Add HDFtype */
NC_free_attr(old);
@@ -1875,8 +1819,7 @@ SDIputattr(NC_array **ap, /* IN/OUT: attribute list */
{
if((*ap)->count >= H4_MAX_NC_ATTRS)
{ /* Too many */
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_EXCEEDMAX, FAIL);
}
/* just add it */
@@ -1884,14 +1827,12 @@ SDIputattr(NC_array **ap, /* IN/OUT: attribute list */
attr->HDFtype = nt; /* Add HDFtype */
if(attr == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
if(NC_incr_array((*ap), (Void *)&attr) == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
}
}
@@ -1930,6 +1871,7 @@ SDsetrange(int32 sdsid, /* IN: dataset ID */
void * pmax, /* IN: valid max */
void * pmin /* IN: valid min */)
{
+ CONSTR(FUNC, "SDsetrange"); /* for HGOTO_ERROR */
NC *handle = NULL;
NC_var *var = NULL;
uint8 data[80];
@@ -1946,28 +1888,24 @@ SDsetrange(int32 sdsid, /* IN: dataset ID */
handle = SDIhandle_from_id(sdsid, SDSTYPE);
if(handle == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
var = SDIget_var(handle, sdsid);
if(var == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
if((pmax == NULL) || (pmin == NULL))
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* move data values over */
if (FAIL == (sz = DFKNTsize(var->HDFtype | DFNT_NATIVE)))
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
HDmemcpy(data, pmin, sz);
@@ -1976,8 +1914,8 @@ SDsetrange(int32 sdsid, /* IN: dataset ID */
/* call common code */
if(SDIputattr(&var->attrs, _HDF_ValidRange, var->HDFtype, (intn) 2, data) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_CANTSETATTR, FAIL);
+ /* Should propagate error code */
}
/* make sure it gets reflected in the file */
@@ -2012,6 +1950,7 @@ SDIapfromid(int32 id, /* IN: object ID */
NC **handlep, /* IN: handle for this file */
NC_array ***app /* OUT: attribute list */)
{
+ CONSTR(FUNC, "SDIapfromid"); /* for HGOTO_ERROR */
NC *handle = NULL;
NC_var *var = NULL;
NC_dim *dim = NULL;
@@ -2026,14 +1965,12 @@ SDIapfromid(int32 id, /* IN: object ID */
var = SDIget_var(handle, id);
if(var == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
(*app) = &(var->attrs);
(*handlep) = handle;
- ret_value = SUCCEED;
- goto done;
+ HGOTO_DONE(SUCCEED);
}
/* see if its a file ID */
@@ -2042,8 +1979,7 @@ SDIapfromid(int32 id, /* IN: object ID */
{
(*app) = &(handle->attrs);
(*handlep) = handle;
- ret_value = SUCCEED;
- goto done;
+ HGOTO_DONE(SUCCEED);
}
/* see if its a dimension ID */
@@ -2052,10 +1988,11 @@ SDIapfromid(int32 id, /* IN: object ID */
{
/* find the dimension */
dim = SDIget_dim(handle, id);
+
+ /* the ID is neither file, data set, nor dimension ID */
if(dim == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* get index of coordinate variable */
@@ -2065,15 +2002,13 @@ SDIapfromid(int32 id, /* IN: object ID */
var = NC_hlookupvar(handle, varid);
if(var == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
(*app) = &(var->attrs);
(*handlep) = handle;
- ret_value = SUCCEED;
- goto done;
+ HGOTO_DONE(SUCCEED);
}
ret_value = FAIL;
@@ -2110,6 +2045,7 @@ SDsetattr(int32 id, /* IN: object ID */
int32 count, /* IN: number of attribute values */
const void * data /* IN: attribute values */)
{
+ CONSTR(FUNC, "SDsetattr"); /* for HGOTO_ERROR */
NC_array **ap = NULL;
NC *handle = NULL;
intn sz;
@@ -2125,59 +2061,51 @@ SDsetattr(int32 id, /* IN: object ID */
/* Sanity check args */
if(name == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* This release doesn't support native number types for attr */
if (nt & DFNT_NATIVE)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* Only positive count is valid (bug HDFFR-989) -BMR */
if (count <= 0)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* Make sure that count is less than MAX_ORDER(Vdata)
and total size is less than MAX_FIELD_SIZE(Vdata) */
if (FAIL == (sz = DFKNTsize(nt)))
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
if ((count > MAX_ORDER) ||
((count * sz) > MAX_FIELD_SIZE))
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* determine what type of ID we've been given */
if(SDIapfromid(id, &handle, &ap) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* still no handle ? */
if(handle == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* hand over to SDIputattr */
if(SDIputattr(ap, name, nt, count, data) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_CANTSETATTR, FAIL);
}
/* make sure it gets reflected in the file */
@@ -2237,15 +2165,13 @@ SDattrinfo(int32 id, /* IN: object ID */
/* determine what type of ID we've been given */
if(SDIapfromid(id, &handle, &app) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
ap = (*app);
if((ap == NULL) || (index >= ap->count))
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/*
@@ -2255,8 +2181,7 @@ SDattrinfo(int32 id, /* IN: object ID */
atp = (NC_attr **) ((char *)ap->values + index * ap->szof);
if(*atp == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* move the information over */
@@ -2323,15 +2248,13 @@ SDreadattr(int32 id, /* IN: object ID */
/* determine what type of ID we've been given */
if(SDIapfromid(id, &handle, &app) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
ap = (*app);
if((ap == NULL) || (index >= ap->count))
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/*
@@ -2341,8 +2264,7 @@ SDreadattr(int32 id, /* IN: object ID */
atp = (NC_attr **) ((char *)ap->values + index * ap->szof);
if(*atp == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* move the information over */
@@ -2450,7 +2372,7 @@ SDwritedata(int32 sdsid, /* IN: dataset ID */
HGOTO_ERROR(DFE_BADCODER, FAIL);
}
}
- /* The case status=FAIL is not handled, not sure if it's intentional. -BMR */
+ /* When HCPgetcomptype returns FAIL, assume no compression */
} /* file is HDF */
/* get ready to write */
@@ -2479,8 +2401,7 @@ SDwritedata(int32 sdsid, /* IN: dataset ID */
if(var == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
no_strides=1;
@@ -2504,8 +2425,7 @@ SDwritedata(int32 sdsid, /* IN: dataset ID */
if(var == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
for(i = 0; i < var->assoc->count; i++)
@@ -2581,6 +2501,7 @@ SDsetdatastrs(int32 sdsid, /* IN: dataset ID */
const char *f, /* IN: format string ("format") */
const char *c /* IN: coordsys string ("coordsys") */)
{
+ CONSTR(FUNC, "SDsetdatastrs"); /* for HGOTO_ERROR */
NC *handle = NULL;
NC_var *var = NULL;
intn ret_value = SUCCEED;
@@ -2595,21 +2516,18 @@ SDsetdatastrs(int32 sdsid, /* IN: dataset ID */
handle = SDIhandle_from_id(sdsid, SDSTYPE);
if(handle == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
if(handle->vars == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
var = SDIget_var(handle, sdsid);
if(var == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
if(l && l[0] != '\0')
@@ -2617,8 +2535,7 @@ SDsetdatastrs(int32 sdsid, /* IN: dataset ID */
if(SDIputattr(&var->attrs, _HDF_LongName, DFNT_CHAR,
(intn) HDstrlen(l), l) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_CANTSETATTR, FAIL);
}
}
@@ -2627,8 +2544,7 @@ SDsetdatastrs(int32 sdsid, /* IN: dataset ID */
if(SDIputattr(&var->attrs, _HDF_Units, DFNT_CHAR,
(intn) HDstrlen(u), u) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_CANTSETATTR, FAIL);
}
}
@@ -2637,8 +2553,7 @@ SDsetdatastrs(int32 sdsid, /* IN: dataset ID */
if(SDIputattr(&var->attrs, _HDF_Format, DFNT_CHAR,
(intn) HDstrlen(f), f) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_CANTSETATTR, FAIL);
}
}
@@ -2647,8 +2562,7 @@ SDsetdatastrs(int32 sdsid, /* IN: dataset ID */
if(SDIputattr(&var->attrs, _HDF_CoordSys, DFNT_CHAR,
(intn) HDstrlen(c), c) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_CANTSETATTR, FAIL);
}
}
@@ -2687,6 +2601,7 @@ SDsetcal(int32 sdsid,/* IN: dataset ID */
float64 ioffe,/* IN: integer offset error */
int32 nt /* IN: number type of uncalibrated data */)
{
+ CONSTR(FUNC, "SDsetcal"); /* for HGOTO_ERROR */
NC *handle = NULL;
NC_var *var = NULL;
intn ret_value = SUCCEED;
@@ -2701,56 +2616,48 @@ SDsetcal(int32 sdsid,/* IN: dataset ID */
handle = SDIhandle_from_id(sdsid, SDSTYPE);
if(handle == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
if(handle->vars == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
var = SDIget_var(handle, sdsid);
if(var == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
if(SDIputattr(&var->attrs, _HDF_ScaleFactor, DFNT_FLOAT64,
(intn) 1, &cal) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_CANTSETATTR, FAIL);
}
if(SDIputattr(&var->attrs, _HDF_ScaleFactorErr, DFNT_FLOAT64,
(intn) 1, &cale) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_CANTSETATTR, FAIL);
}
if(SDIputattr(&var->attrs, _HDF_AddOffset, DFNT_FLOAT64,
(intn) 1, &ioff) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_CANTSETATTR, FAIL);
}
if(SDIputattr(&var->attrs, _HDF_AddOffsetErr, DFNT_FLOAT64,
(intn) 1, &ioffe) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_CANTSETATTR, FAIL);
}
if(SDIputattr(&var->attrs, _HDF_CalibratedNt, DFNT_INT32,
(intn) 1, &nt) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_CANTSETATTR, FAIL);
}
/* make sure it gets reflected in the file */
@@ -2784,6 +2691,7 @@ intn
SDsetfillvalue(int32 sdsid, /* IN: dataset ID */
void * val /* IN: fillvalue */)
{
+ CONSTR(FUNC, "SDsetfillvalue"); /* for HGOTO_ERROR */
NC *handle = NULL;
NC_var *var = NULL;
intn ret_value = SUCCEED;
@@ -2798,28 +2706,24 @@ SDsetfillvalue(int32 sdsid, /* IN: dataset ID */
handle = SDIhandle_from_id(sdsid, SDSTYPE);
if(handle == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
if(handle->vars == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
var = SDIget_var(handle, sdsid);
if(var == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
if(SDIputattr(&var->attrs, _FillValue, var->HDFtype,
(intn) 1, val) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_CANTSETATTR, FAIL);
}
/* make sure it gets reflected in the file */
@@ -2874,28 +2778,24 @@ SDgetfillvalue(int32 sdsid, /* IN: dataset ID */
handle = SDIhandle_from_id(sdsid, SDSTYPE);
if(handle == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
if(handle->vars == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
var = SDIget_var(handle, sdsid);
if(var == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
attr = (NC_attr **) NC_findattr(&(var->attrs), _FillValue);
if(attr == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_CANTGETATTR, FAIL);
}
NC_copy_arrayvals((char *)val, (*attr)->data) ;
@@ -2934,6 +2834,7 @@ SDgetdatastrs(int32 sdsid, /* IN: dataset ID */
char *c, /* OUT: coordsys string ("coordsys") */
intn len /* IN: buffer length */)
{
+ CONSTR(FUNC, "SDgetdatastrs"); /* for HGOTO_ERROR */
NC *handle = NULL;
NC_var *var = NULL;
NC_attr **attr = NULL;
@@ -2949,21 +2850,18 @@ SDgetdatastrs(int32 sdsid, /* IN: dataset ID */
handle = SDIhandle_from_id(sdsid, SDSTYPE);
if(handle == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
if(handle->vars == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
var = SDIget_var(handle, sdsid);
if(var == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
if(l)
@@ -3067,6 +2965,7 @@ SDgetcal(int32 sdsid, /* IN: dataset ID */
float64 *ioffe, /* OUT: integer offset error */
int32 *nt /* OUT: number type of uncalibrated data */)
{
+ CONSTR(FUNC, "SDgetcal"); /* for HGOTO_ERROR */
NC *handle = NULL;
NC_var *var = NULL;
NC_attr **attr = NULL;
@@ -3082,60 +2981,52 @@ SDgetcal(int32 sdsid, /* IN: dataset ID */
handle = SDIhandle_from_id(sdsid, SDSTYPE);
if(handle == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
if(handle->vars == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
var = SDIget_var(handle, sdsid);
if(var == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
attr = (NC_attr **) NC_findattr(&(var->attrs), _HDF_ScaleFactor);
if(attr == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_CANTGETATTR, FAIL);
}
NC_copy_arrayvals((char *)cal, (*attr)->data) ;
attr = (NC_attr **) NC_findattr(&(var->attrs), _HDF_ScaleFactorErr);
if(attr == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_CANTGETATTR, FAIL);
}
NC_copy_arrayvals((char *)cale, (*attr)->data) ;
attr = (NC_attr **) NC_findattr(&(var->attrs), _HDF_AddOffset);
if(attr == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_CANTGETATTR, FAIL);
}
NC_copy_arrayvals((char *)ioff, (*attr)->data) ;
attr = (NC_attr **) NC_findattr(&(var->attrs), _HDF_AddOffsetErr);
if(attr == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_CANTGETATTR, FAIL);
}
NC_copy_arrayvals((char *)ioffe, (*attr)->data) ;
attr = (NC_attr **) NC_findattr(&(var->attrs), _HDF_CalibratedNt);
if(attr == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_CANTGETATTR, FAIL);
}
NC_copy_arrayvals((char *)nt, (*attr)->data) ;
@@ -3175,6 +3066,7 @@ SDIgetcoordvar(NC *handle, /* IN: file handle */
int32 id, /* IN: dimension ID */
int32 nt /* IN: number type to use if new variable*/)
{
+ CONSTR(FUNC, "SDIgetcoordvar"); /* for HGOTO_ERROR */
unsigned ii;
unsigned len;
nc_type nctype;
@@ -3211,10 +3103,9 @@ SDIgetcoordvar(NC *handle, /* IN: file handle */
{
#ifdef SDDEBUG
/* replace it with NCAdvice or HERROR? */
- fprintf(stderr "SDIgetcoordvar: hdf_unmap_type failed for %d\n", nt);
+ fprintf(stderr, "SDIgetcoordvar: hdf_unmap_type failed for %d\n", nt);
#endif
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
(*dp)->HDFtype = nt;
@@ -3223,21 +3114,19 @@ SDIgetcoordvar(NC *handle, /* IN: file handle */
(*dp)->szof = NC_typelen((*dp)->type);
if (FAIL == ((*dp)->HDFsize = DFKNTsize(nt)))
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
/* recompute all of the shape information */
/* BUG: this may be a memory leak ??? */
if(NC_var_shape((*dp), handle->dims) == -1)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
}
- ret_value = ii; /* found it? */
- goto done;
+ /* found it? */
+ HGOTO_DONE((int32)ii);
}
}
@@ -3248,16 +3137,14 @@ SDIgetcoordvar(NC *handle, /* IN: file handle */
if ((nctype = hdf_unmap_type((int)nt)) == FAIL)
{
/* replace it with NCAdvice or HERROR? */
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
dimindex = (intn)id;
var = (NC_var *) NC_new_var(name->values, nctype, (unsigned)1, &dimindex);
if(var == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* Set flag to indicate that this variable is a coordinate variable -
@@ -3278,22 +3165,19 @@ SDIgetcoordvar(NC *handle, /* IN: file handle */
/* add it to the handle */
if(handle->vars->count >= H4_MAX_NC_VARS)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
var->cdf = handle; /* set cdf before calling NC_var_shape */
/* compute all of the shape information */
if(NC_var_shape(var, handle->dims) == -1)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
if(NC_incr_array(handle->vars, (Void *)&var) == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
ret_value = handle->vars->count - 1;
@@ -3329,6 +3213,7 @@ SDsetdimstrs(int32 id, /* IN: dimension ID */
const char *u, /* IN: units string ("units") */
const char *f /* IN: format string ("format") */)
{
+ CONSTR(FUNC, "SDsetdimstrs"); /* for HGOTO_ERROR */
intn varid;
NC *handle = NULL;
NC_dim *dim = NULL;
@@ -3346,32 +3231,28 @@ SDsetdimstrs(int32 id, /* IN: dimension ID */
handle = SDIhandle_from_id(id, DIMTYPE);
if(handle == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* get the dimension structure */
dim = SDIget_dim(handle, id);
if(dim == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* look for a variable with the same name */
varid = (intn)SDIgetcoordvar(handle, dim, (int32)(id & 0xffff), (int32)0);
if(varid == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* get the variable object */
var = NC_hlookupvar(handle, varid);
if(var == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* set the attributes */
@@ -3380,8 +3261,7 @@ SDsetdimstrs(int32 id, /* IN: dimension ID */
if(SDIputattr(&var->attrs, _HDF_LongName, DFNT_CHAR,
(intn) HDstrlen(l), l) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_CANTSETATTR, FAIL);
}
}
@@ -3390,8 +3270,7 @@ SDsetdimstrs(int32 id, /* IN: dimension ID */
if(SDIputattr(&var->attrs, _HDF_Units, DFNT_CHAR,
(intn) HDstrlen(u), u) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_CANTSETATTR, FAIL);
}
}
@@ -3400,8 +3279,7 @@ SDsetdimstrs(int32 id, /* IN: dimension ID */
if(SDIputattr(&var->attrs, _HDF_Format, DFNT_CHAR,
(intn) HDstrlen(f), f) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_CANTSETATTR, FAIL);
}
}
@@ -3435,20 +3313,19 @@ int32
SDIfreevarAID(NC *handle, /* IN: file handle */
int32 index /* IN: variable index */)
{
+ CONSTR(FUNC, "SDIfreevarAID"); /* for HGOTO_ERROR */
NC_array **ap = NULL;
NC_var *var = NULL;
int32 ret_value = SUCCEED;
if(handle == NULL || !handle->vars)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
if(index < 0 || index > handle->vars->count)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
ap = (NC_array **)handle->vars->values;
@@ -3460,8 +3337,7 @@ SDIfreevarAID(NC *handle, /* IN: file handle */
{
if (Hendaccess(var->aid) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
}
@@ -3496,6 +3372,7 @@ SDsetdimscale(int32 id, /* IN: dimension ID */
int32 nt, /* IN: number type of data */
void * data /* IN: scale values */)
{
+ CONSTR(FUNC, "SDsetdimscale"); /* for HGOTO_ERROR */
NC *handle = NULL;
NC_dim *dim = NULL;
int32 status;
@@ -3519,31 +3396,27 @@ SDsetdimscale(int32 id, /* IN: dimension ID */
handle = SDIhandle_from_id(id, DIMTYPE);
if(handle == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* get the dimension structure */
dim = SDIget_dim(handle, id);
if(dim == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* sanity check, if not SD_UNLIMITED */
if( dim->size != 0 && count != dim->size)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* look for a variable with the same name */
varid = (intn)SDIgetcoordvar(handle, dim, id & 0xffff, nt);
if(varid == -1)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* store the data */
@@ -3553,16 +3426,14 @@ SDsetdimscale(int32 id, /* IN: dimension ID */
status = NCvario(handle, varid, start, end, (Void *)data);
if(status == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* free the AID */
status = SDIfreevarAID(handle, varid);
if(status == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* make sure it gets reflected in the file */
@@ -3597,6 +3468,7 @@ intn
SDgetdimscale(int32 id, /* IN: dimension ID */
void * data /* OUT: scale values */)
{
+ CONSTR(FUNC, "SDgetdimscale"); /* for HGOTO_ERROR */
NC *handle = NULL;
NC_dim *dim = NULL;
NC_var *vp = NULL;
@@ -3620,24 +3492,21 @@ SDgetdimscale(int32 id, /* IN: dimension ID */
/* sanity check args */
if(data == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* get the handle */
handle = SDIhandle_from_id(id, DIMTYPE);
if(handle == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* get the dimension structure */
dim = SDIget_dim(handle, id);
if(dim == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* look for a variable with the same name */
@@ -3645,8 +3514,7 @@ SDgetdimscale(int32 id, /* IN: dimension ID */
varid = (intn)SDIgetcoordvar(handle, dim, (int32)(id & 0xffff), (int32)0);
if(varid == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* store the data */
@@ -3663,8 +3531,7 @@ SDgetdimscale(int32 id, /* IN: dimension ID */
vp = SDIget_var(handle, varid);
if (vp == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
end[0] = vp->numrecs;
@@ -3674,16 +3541,14 @@ SDgetdimscale(int32 id, /* IN: dimension ID */
status = NCvario(handle, varid, start, end, (Void *)data);
if(status == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* free the AID */
status = SDIfreevarAID(handle, varid);
if(status == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
done:
@@ -3720,6 +3585,7 @@ SDdiminfo(int32 id, /* IN: dimension ID */
int32 *nt, /* OUT: number type of scales */
int32 *nattr /* OUT: the number of local attributes */)
{
+ CONSTR(FUNC, "SDdiminfo"); /* for HGOTO_ERROR */
NC *handle = NULL;
NC_dim *dim = NULL;
NC_var *var = NULL;
@@ -3739,25 +3605,29 @@ SDdiminfo(int32 id, /* IN: dimension ID */
handle = SDIhandle_from_id(id, DIMTYPE);
if(handle == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
if(handle->dims == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
dim = SDIget_dim(handle, id);
if(dim == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
if(name != NULL)
{
+/* GeorgeV switched to using HDmemcpy in r2739. Tried switching back to HDstrncpy
+ because it should be used to copy a string (emailed with QK 5/27/2016), but tests
+ failed. Some strings are stored with NC_string; more time is needed to figure
+ out the whole scheme. Switching back to using HDmemcpy for now.
+ -BMR, 5/30/2016
+*/
+
#if 0
HDstrncpy(name, dim->name->values, dim->name->len);
#endif
@@ -3801,14 +3671,14 @@ SDdiminfo(int32 id, /* IN: dimension ID */
{
*nt = ((*dp)->numrecs ? (*dp)->HDFtype : 0);
*nattr = ((*dp)->attrs ? (*dp)->attrs->count : 0);
- goto done;
+ HGOTO_DONE(ret_value);
}
}
else /* netCDF file */
{
*nt = (*dp)->HDFtype;
*nattr = ((*dp)->attrs ? (*dp)->attrs->count : 0);
- goto done;
+ HGOTO_DONE(ret_value);
}
} /* name matched */
} /* rank = 1 */
@@ -3820,8 +3690,6 @@ done:
}
/* Normal cleanup */
-
-
return ret_value;
} /* SDdiminfo */
@@ -3870,21 +3738,18 @@ SDgetdimstrs(int32 id, /* IN: dataset ID */
handle = SDIhandle_from_id(id, DIMTYPE);
if(handle == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
if(handle->vars == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
dim = SDIget_dim(handle, id);
if(dim == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* need to get a pointer to the var now */
@@ -3981,8 +3846,6 @@ done:
}
/* Normal cleanup */
-
-
return ret_value;
} /* SDgetdimstrs */
@@ -4022,10 +3885,12 @@ SDsetexternalfile(int32 id, /* IN: dataset ID */
const char *filename, /* IN: name of external file */
int32 offset /* IN: offset in external file */)
{
- NC *handle = NULL;
- NC_var *var = NULL;
- intn status;
- int ret_value = SUCCEED;
+ CONSTR(FUNC, "SDsetexternalfile"); /* for HGOTO_ERROR */
+ NC *handle = NULL;
+ NC_var *var = NULL;
+ intn extfname_len = 0; /* Length of external file's name */
+ intn status;
+ int ret_value = SUCCEED;
#ifdef SDDEBUG
fprintf(stderr, "SDsetexternalfile: I've been called\n");
@@ -4034,30 +3899,33 @@ SDsetexternalfile(int32 id, /* IN: dataset ID */
/* clear error stack */
HEclear();
+ /* Call SDgetexternalinfo passing in 0 and NULLs to get only the length of
+ the external filename if it exists. A positive value indicates an
+ external file exists and SDsetexternalfile should not have any effect */
+ extfname_len = SDgetexternalinfo(id, 0, NULL, NULL, NULL);
+ if (extfname_len > 0)
+ HGOTO_DONE(DFE_NONE);
+
if(NULL == filename || offset < 0)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
handle = SDIhandle_from_id(id, SDSTYPE);
if(handle == NULL || handle->file_type != HDF_FILE)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
if(handle->vars == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
var = SDIget_var(handle, id);
if(var == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* already exists */
@@ -4065,8 +3933,7 @@ SDsetexternalfile(int32 id, /* IN: dataset ID */
{
/* no need to give a length since the element already exists */
status = (intn)HXcreate(handle->hdf_file, (uint16)DATA_TAG,
- (uint16) var->data_ref,
- filename, offset, (int32)0);
+ (uint16) var->data_ref, filename, offset, (int32)0);
}
else
{
@@ -4083,29 +3950,22 @@ SDsetexternalfile(int32 id, /* IN: dataset ID */
#endif /* NOT_YET */
if(var->data_ref == 0)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_NOREF, FAIL);
}
/* need to give a length since the element does not exist yet */
status = (intn)HXcreate(handle->hdf_file, (uint16)DATA_TAG,
- (uint16) var->data_ref,
- filename, offset, length);
-
+ (uint16) var->data_ref, filename, offset, length);
}
-
if(status != FAIL)
{
if((var->aid != 0) && (var->aid != FAIL))
{
if (Hendaccess(var->aid) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_CANTENDACCESS, FAIL);
}
-
}
-
var->aid = status;
ret_value = SUCCEED;
}
@@ -4118,8 +3978,6 @@ done:
}
/* Normal cleanup */
-
-
return ret_value;
} /* SDsetexternalfile */
@@ -4144,9 +4002,10 @@ done:
is 0, SDgetexternalinfo will simply return the length of the external file
name, and not the file name itself.
- When the element is not special, SDgetexternalinfo will return
- 0. If the element is SPECIAL_EXT, but the external file name
- doesn't exist, SDgetexternalinfo will return FAIL.
+ When the element is not special or special but not external,
+ SDgetexternalinfo will return 0. If the element is SPECIAL_EXT,
+ but the external file name doesn't exist, SDgetexternalinfo will
+ return FAIL.
IMPORTANT: It is the user's responsibility to see that the
external files are located in the same directory with the main
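The description above covers the return values of SDgetexternalinfo. A minimal sketch of the usual two-step call, first querying the name length with a zero buffer size and NULL buffers (the same pattern SDsetexternalfile now uses internally), then retrieving the name and offset, could look like the code below; sds_id is assumed to come from an earlier SDstart/SDselect, and the explicit null-termination is defensive since the returned name need not be terminated.

/* Sketch only: sds_id is assumed to come from SDstart/SDselect elsewhere. */
#include <stdlib.h>
#include "mfhdf.h"

void show_external_info(int32 sds_id)
{
    int32 offset = 0, length = 0;

    /* With buf_size 0 and NULL buffers only the name length is returned:
       0 means no external storage, FAIL means the query itself failed. */
    intn name_len = SDgetexternalinfo(sds_id, 0, NULL, NULL, NULL);

    if (name_len > 0)
    {
        char *ext_name = (char *)malloc((size_t)name_len + 1);
        if (ext_name != NULL)
        {
            /* Second call fills the name plus the offset/length of the data. */
            if (SDgetexternalinfo(sds_id, (uintn)name_len, ext_name,
                                  &offset, &length) > 0)
                ext_name[name_len] = '\0';
            free(ext_name);
        }
    }
}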
@@ -4277,9 +4136,10 @@ done:
} /* SDgetexternalinfo */
-/******************************************************************************
+/************************** Deprecated ******************************
NAME
SDgetexternalfile -- retrieves external file information
+ (Deprecated)
USAGE
int32 SDgetexternalfile(id, filename, offset)
int32 id;
@@ -4305,6 +4165,9 @@ done:
Returns length of the external file name or FAIL. If the SDS
does not have external element, the length will be 0.
+ NOTE: This function is replaced by SDgetexternalinfo because it had
+ missed the "length" parameter.
+
******************************************************************************/
intn
SDgetexternalfile(int32 id, /* IN: dataset ID */
@@ -4395,7 +4258,7 @@ done:
}
/* Normal cleanup */
return ret_value;
-} /* SDgetexternalfile */
+} /* SDgetexternalfile (Deprecated) */
/******************************************************************************
@@ -4438,6 +4301,7 @@ SDsetnbitdataset(int32 id, /* IN: dataset ID */
intn sign_ext, /* IN: Whether to sign extend */
intn fill_one /* IN: Whether to fill background w/1's */)
{
+ CONSTR(FUNC, "SDsetnbitdataset"); /* for HGOTO_ERROR */
NC *handle = NULL;
NC_var *var = NULL;
model_info m_info; /* modeling information for the HCcreate() call */
@@ -4454,28 +4318,24 @@ SDsetnbitdataset(int32 id, /* IN: dataset ID */
if(start_bit < 0 || bit_len <= 0)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
handle = SDIhandle_from_id(id, SDSTYPE);
if(handle == NULL || handle->file_type != HDF_FILE)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
if(handle->vars == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
var = SDIget_var(handle, id);
if(var == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* set up n-bit parameters */
@@ -4502,8 +4362,7 @@ SDsetnbitdataset(int32 id, /* IN: dataset ID */
#endif /* NOT_YET */
if(var->data_ref == 0)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
} /* end if */
@@ -4520,8 +4379,7 @@ SDsetnbitdataset(int32 id, /* IN: dataset ID */
{
if (Hendaccess(var->aid) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_CANTENDACCESS, FAIL);
}
}
@@ -4562,6 +4420,7 @@ done:
intn
SDsetup_szip_parms( int32 id, NC *handle, comp_info *c_info, int32 *cdims)
{
+ CONSTR(FUNC, "SDsetup_szip_parms"); /* for HGOTO_ERROR */
NC_dim *dim; /* to check if the dimension is unlimited */
int32 dimindex;/* to obtain the NC_dim record */
NC_var *var;
@@ -4576,15 +4435,13 @@ SDsetup_szip_parms( int32 id, NC *handle, comp_info *c_info, int32 *cdims)
if(handle->vars == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
var = SDIget_var(handle, id);
if(var == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
ndims = var->assoc->count;
@@ -4644,8 +4501,7 @@ SDsetcompress(int32 id, /* IN: dataset ID */
if (comp_type < COMP_CODE_NONE || comp_type >= COMP_CODE_INVALID)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* Must have encoder to set compression */
@@ -4659,28 +4515,24 @@ SDsetcompress(int32 id, /* IN: dataset ID */
handle = SDIhandle_from_id(id, SDSTYPE);
if(handle == NULL || handle->file_type != HDF_FILE)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
if(handle->vars == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
var = SDIget_var(handle, id);
if(var == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* disallow setting compress for SDS with rank = 0 - BMR, bug #1045 */
if(var->shape == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* unlimited dimensions don't work with compression */
@@ -4693,16 +4545,14 @@ SDsetcompress(int32 id, /* IN: dataset ID */
dim = SDIget_dim(handle, dimindex);
if(dim == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* If this dimension is unlimited, then return FAIL; the subsequent
* writing of this SDS will write uncompressed data */
if (dim->size == SD_UNLIMITED)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
#ifdef H4_HAVE_LIBSZ /* we have the library */
if (comp_type == COMP_CODE_SZIP) {
@@ -4735,8 +4585,7 @@ SDsetcompress(int32 id, /* IN: dataset ID */
#endif /* NOT_YET */
if(var->data_ref == 0)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
} /* end if */
@@ -4756,8 +4605,7 @@ SDsetcompress(int32 id, /* IN: dataset ID */
{
if (Hendaccess(var->aid) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_CANTENDACCESS, FAIL);
}
}
@@ -4773,22 +4621,19 @@ SDsetcompress(int32 id, /* IN: dataset ID */
vg = Vattach(handle->hdf_file, var->vgid, "w");
if(vg == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* add new Vdata to existing Vgroup */
if (Vaddtagref(vg, (int32) DATA_TAG, (int32) var->data_ref) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* detach from the variable's VGroup --- will no longer need it */
if (Vdetach(vg) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
}
@@ -5137,6 +4982,7 @@ int32
SDfindattr(int32 id, /* IN: object ID */
const char *attrname /* IN: attribute name */)
{
+ CONSTR(FUNC, "SDfindattr"); /* for HGOTO_ERROR */
NC_array *ap = NULL;
NC_array **app = NULL;
NC_attr **attr = NULL;
@@ -5151,15 +4997,13 @@ SDfindattr(int32 id, /* IN: object ID */
/* determine what type of ID we've been given */
if(SDIapfromid(id, &handle, &app) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
ap = (*app);
if(ap == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/*
@@ -5175,8 +5019,8 @@ SDfindattr(int32 id, /* IN: object ID */
if( len == (*attr)->name->len
&& HDstrncmp(attrname, (*attr)->name->values, HDstrlen(attrname)) == 0)
{
- ret_value = attrid ; /* found it */
- goto done;
+ /* found it */
+ HGOTO_DONE(attrid);
}
}
@@ -5207,6 +5051,7 @@ done:
int32
SDidtoref(int32 id /* IN: dataset ID */)
{
+ CONSTR(FUNC, "SDidtoref"); /* for HGOTO_ERROR */
NC *handle = NULL;
NC_var *var = NULL;
int32 ret_value = FAIL;
@@ -5221,21 +5066,18 @@ SDidtoref(int32 id /* IN: dataset ID */)
handle = SDIhandle_from_id(id, SDSTYPE);
if(handle == NULL || handle->file_type != HDF_FILE)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
if(handle->vars == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
var = SDIget_var(handle, id);
if(var == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
ret_value = (int32) var->ndg_ref;
@@ -5267,6 +5109,7 @@ int32
SDreftoindex(int32 fid, /* IN: file ID */
int32 ref /* IN: reference number */)
{
+ CONSTR(FUNC, "SDreftoindex"); /* for HGOTO_ERROR */
NC *handle = NULL;
NC_var **dp = NULL;
intn ii;
@@ -5282,14 +5125,12 @@ SDreftoindex(int32 fid, /* IN: file ID */
handle = SDIhandle_from_id(fid, CDFTYPE);
if(handle == NULL || handle->file_type != HDF_FILE)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
if(handle->vars == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
dp = (NC_var**) handle->vars->values;
@@ -5297,8 +5138,7 @@ SDreftoindex(int32 fid, /* IN: file ID */
{
if((*dp)->ndg_ref == ref)
{
- ret_value = ii;
- goto done;
+ HGOTO_DONE(ii);
}
}
@@ -5331,6 +5171,7 @@ done:
int32
SDisrecord(int32 id /* IN: dataset ID */)
{
+ CONSTR(FUNC, "SDisrecord"); /* for HGOTO_ERROR */
NC *handle;
NC_var *var;
int32 ret_value = TRUE;
@@ -5345,27 +5186,24 @@ SDisrecord(int32 id /* IN: dataset ID */)
handle = SDIhandle_from_id(id, SDSTYPE);
if(handle == NULL)
{
- ret_value = FALSE;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FALSE);
}
if(handle->vars == NULL)
{
- ret_value = FALSE;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FALSE);
}
var = SDIget_var(handle, id);
if(var == NULL)
{
- ret_value = FALSE;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FALSE);
}
if(var->shape == NULL)
{
- ret_value = TRUE; /* EP thinks it should return true - BMR, bug #1045 */
- goto done;
+ /* EP thinks it should return true - BMR, bug #1045 */
+ HGOTO_ERROR(DFE_ARGS, TRUE);
}
if(var->shape[0] == SD_UNLIMITED)
@@ -5399,6 +5237,7 @@ done:
intn
SDiscoordvar(int32 id /* IN: dataset ID */)
{
+ CONSTR(FUNC, "SDiscoordvar"); /* for HGOTO_ERROR */
NC *handle = NULL;
NC_var *var = NULL;
NC_dim *dim = NULL;
@@ -5415,35 +5254,30 @@ SDiscoordvar(int32 id /* IN: dataset ID */)
handle = SDIhandle_from_id(id, SDSTYPE);
if(handle == NULL)
{
- ret_value = FALSE;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
if(handle->vars == NULL)
{
- ret_value = FALSE;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
var = SDIget_var(handle, id);
if(var == NULL)
{
- ret_value = FALSE;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* check whether this var is an SDS or a coordinate variable, then
return the appropriate value (if and else if) */
if (var->var_type == IS_SDSVAR)
{
- ret_value = FALSE;
- goto done;
+ HGOTO_DONE(FALSE);
}
else if(var->var_type == IS_CRDVAR)
{
- ret_value = TRUE;
- goto done;
+ HGOTO_DONE(TRUE);
}
/* whether or not this var is a coord var is unknown because the data was
@@ -5457,20 +5291,17 @@ SDiscoordvar(int32 id /* IN: dataset ID */)
dim = SDIget_dim(handle, dimindex);
if(dim == NULL)
{
- ret_value = FALSE;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FALSE);
}
if(var->name->len != dim->name->len)
{
- ret_value = FALSE;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FALSE);
}
if(HDstrcmp(var->name->values, dim->name->values))
{
- ret_value = FALSE;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FALSE);
}
ret_value = TRUE;
@@ -5553,29 +5384,25 @@ SDsetrag(int32 sdsid,
handle = SDIhandle_from_id(sdsid, SDSTYPE);
if(handle == NULL || handle->file_type != HDF_FILE)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
if(handle->vars == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
var = SDIget_var(handle, sdsid);
if((var == NULL) || (var->is_ragged == FALSE))
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* verify writing to a valid area */
if(var->rag_fill != low)
{
printf("var->rag_fill %d low %d\n", var->rag_fill, low);
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* allocate some space for the ragged dimension if needed */
@@ -5585,8 +5412,7 @@ SDsetrag(int32 sdsid,
var->rag_list = (int32 *) HDmalloc(sizeof(int32) * var->dsizes[0]);
if(var->rag_list == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
}
@@ -5628,6 +5454,7 @@ intn
SDsetaccesstype(int32 id, /* IN: dataset ID */
uintn accesstype /* IN: access type */)
{
+ CONSTR(FUNC, "SDsetaccesstype"); /* for HGOTO_ERROR */
NC *handle = NULL;
NC_var *var = NULL;
intn ret_value = SUCCEED;
@@ -5646,28 +5473,24 @@ SDsetaccesstype(int32 id, /* IN: dataset ID */
case DFACC_PARALLEL:
break;
default:
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
handle = SDIhandle_from_id(id, SDSTYPE);
if(handle == NULL || handle->file_type != HDF_FILE)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
if(handle->vars == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
var = SDIget_var(handle, id);
if(var == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* if aid is not valid yet, there is no access_rec setup yet. */
@@ -5707,6 +5530,7 @@ intn
SDsetblocksize(int32 sdsid, /* IN: dataset ID */
int32 block_size /* IN: size of the block in bytes */)
{
+ CONSTR(FUNC, "SDsetblocksize"); /* for HGOTO_ERROR */
NC *handle = NULL;
NC_var *var = NULL;
intn ret_value = SUCCEED;
@@ -5722,16 +5546,14 @@ SDsetblocksize(int32 sdsid, /* IN: dataset ID */
handle = SDIhandle_from_id(sdsid, SDSTYPE);
if(handle == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* get the variable */
var = SDIget_var(handle, sdsid);
if(var == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* set the block size */
@@ -5841,6 +5663,7 @@ SDsetfillmode(int32 sd_id, /* IN: HDF file ID, returned from SDstart */
either SD_FILL or SD_NOFILL.
SD_FILL is the default mode. */)
{
+ CONSTR(FUNC, "SDsetfillmode"); /* for HGOTO_ERROR */
NC *handle = NULL;
intn cdfid;
intn ret_value = FAIL;
@@ -5856,8 +5679,7 @@ SDsetfillmode(int32 sd_id, /* IN: HDF file ID, returned from SDstart */
handle = SDIhandle_from_id(sd_id, CDFTYPE);
if(handle == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
cdfid = (intn)sd_id & 0xffff;
@@ -5891,6 +5713,7 @@ SDsetdimval_comp(int32 dimid, /* IN: dimension ID, returned from SDgetdimid *
SD_DIMVAL_BW_INCOMP -- incompatible.
(defined in mfhdf.h ) */)
{
+ CONSTR(FUNC, "SDsetdimval_comp"); /* for HGOTO_ERROR */
NC *handle = NULL;
NC_dim *dim = NULL;
intn ret_value = SUCCEED;
@@ -5906,16 +5729,14 @@ SDsetdimval_comp(int32 dimid, /* IN: dimension ID, returned from SDgetdimid *
handle = SDIhandle_from_id(dimid, DIMTYPE);
if(handle == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* get the dimension structure */
dim = SDIget_dim(handle, dimid);
if(dim == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* if (dim->size != SD_UNLIMITED
@@ -5955,6 +5776,7 @@ done:
intn
SDisdimval_bwcomp(int32 dimid /* IN: dimension ID, returned from SDgetdimid */)
{
+ CONSTR(FUNC, "SDisdimval_bwcomp"); /* for HGOTO_ERROR */
NC *handle = NULL;
NC_dim *dim = NULL;
intn ret_value = FAIL;
@@ -5970,16 +5792,14 @@ SDisdimval_bwcomp(int32 dimid /* IN: dimension ID, returned from SDgetdimid */)
handle = SDIhandle_from_id(dimid, DIMTYPE);
if(handle == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* get the dimension structure */
dim = SDIget_dim(handle, dimid);
if(dim == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* Default is incompatible. Return dim->dim00_compat.
@@ -6155,23 +5975,20 @@ SDsetchunk(int32 sdsid, /* IN: sds access id */
handle = SDIhandle_from_id(sdsid, SDSTYPE);
if(handle == NULL || handle->file_type != HDF_FILE || handle->vars == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* get variable from id */
var = SDIget_var(handle, sdsid);
if(var == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* disallow setting chunk for SDS with rank = 0 - BMR, bug #1045 */
if(var->shape == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* Decide type of defintion passed in */
@@ -6248,8 +6065,7 @@ SDsetchunk(int32 sdsid, /* IN: sds access id */
chunk[0].minfo = &minfo; /* dummy */
break;
default:
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
#ifdef CHK_DEBUG
@@ -6268,14 +6084,12 @@ SDsetchunk(int32 sdsid, /* IN: sds access id */
#ifdef CHK_DEBUG
fprintf(stderr,"SDsetchunk: failed to get data ref \n");
#endif
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
}
else /* data ref exists, Error since can't convert existing SDS to chunked */
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* Now start setting chunk info */
@@ -6287,8 +6101,7 @@ SDsetchunk(int32 sdsid, /* IN: sds access id */
/* allocate space for chunk dimensions */
if ((chunk[0].pdims = (DIM_DEF *)HDmalloc(ndims*sizeof(DIM_DEF))) == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* initialize datset/chunk sizes using CHUNK defintion structure */
@@ -6308,8 +6121,7 @@ SDsetchunk(int32 sdsid, /* IN: sds access id */
fprintf(stderr,"SDsetchunk: unlimited dimension case \n");
fflush(stderr);
#endif
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
#ifdef CHK_DEBUG
@@ -6325,8 +6137,7 @@ SDsetchunk(int32 sdsid, /* IN: sds access id */
fprintf(stderr,"SDsetchunk: chunk length less than 1, cdims[%d]=%d \n",i,cdims[i]);
fflush(stderr);
#endif
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
#ifdef CHK_DEBUG
fprintf(stderr,"SDsetchunk: cdims[%d]=%d \n",i,cdims[i]);
@@ -6355,8 +6166,7 @@ SDsetchunk(int32 sdsid, /* IN: sds access id */
fill_val_len = var->HDFsize;
if ((fill_val = (void *)HDmalloc(fill_val_len)) == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* get fill value if one is set for this Dataset.
@@ -6395,24 +6205,21 @@ SDsetchunk(int32 sdsid, /* IN: sds access id */
*((float64 *)p) = FILL_DOUBLE;
break;
default:
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
}
/* figure out if fill value has to be converted */
if (FAIL == (platntsubclass = DFKgetPNSC(var->HDFtype, DF_MT)))
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
if (DFKisnativeNT(var->HDFtype))
{
if (FAIL == (outntsubclass = DFKgetPNSC(var->HDFtype, DF_MT)))
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
}
else
@@ -6432,8 +6239,7 @@ SDsetchunk(int32 sdsid, /* IN: sds access id */
if(tBuf == NULL)
{
tBuf_size = 0;
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_NOSPACE, FAIL);
} /* end if */
} /* end if */
@@ -6445,8 +6251,7 @@ SDsetchunk(int32 sdsid, /* IN: sds access id */
if (FAIL == DFKconvert(fill_val, tBuf, var->HDFtype,
(uint32) (fill_val_len/var->HDFsize), DFACC_WRITE, 0, 0))
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
/* check to see already special.
@@ -6488,8 +6293,7 @@ SDsetchunk(int32 sdsid, /* IN: sds access id */
{
if (Hendaccess(var->aid) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
}
@@ -6518,142 +6322,6 @@ done:
return ret_value;
} /* SDsetchunk */
-/******************************************************************************
- NAME
- SDgetchunkinfo_old -- get Info on SDS
-
- DESCRIPTION
- This routine gets any special information on the SDS. If its chunked,
- chunked and compressed or just a regular SDS. Currently it will only
- fill the array of chunk lengths for each dimension as specified in
- the 'HDF_CHUNK_DEF' union. It does not tell you the type of compression
- or the compression parameters used. You can pass in a NULL for 'chunk_def'
- if don't want the chunk lengths for each dimension.
- If successfull it will return a bit-or'd value in 'flags' indicating
- if the SDS is chunked(HDF_CHUNK), chunked and compressed(HDF_CHUNK | HDF_COMP)
- or non-chunked(HDF_NONE).
-
- e.g. 4x4 array - Pseudo-C
- {
- HDF_CHUNK_DEF rchunk_def;
- int32 cflags;
- ...
- SDgetchunkinfo_old(sdsid, &rchunk_def, &cflags);
- ...
- }
-
- RETURNS
- SUCCEED/FAIL
-
- AUTHOR
- -GeorgeV
-******************************************************************************/
-intn
-SDgetchunkinfo_old(int32 sdsid, /* IN: sds access id */
- HDF_CHUNK_DEF *chunk_def, /* IN/OUT: chunk definition */
- int32 *flags /* IN/OUT: flags */)
-{
- NC *handle = NULL; /* file handle */
- NC_var *var = NULL; /* SDS variable */
- sp_info_block_t info_block; /* special info block */
- int16 special; /* Special code */
- intn i; /* loop variable */
- intn ret_value = SUCCEED; /* return value */
-
- /* clear error stack */
- HEclear();
-
- /* Check args */
-
- /* get file handle and verify it is an HDF file
- we only handle dealing with SDS only not coordinate variables */
- handle = SDIhandle_from_id(sdsid, SDSTYPE);
- if(handle == NULL || handle->file_type != HDF_FILE || handle->vars == NULL)
- {
- ret_value = FAIL;
- goto done;
- }
-
- /* get variable from id */
- var = SDIget_var(handle, sdsid);
- if(var == NULL)
- {
- ret_value = FAIL;
- goto done;
- }
-
- /* Data set is empty and not special */
- if(var->data_ref == 0)
- {
- *flags = HDF_NONE; /* regular SDS */
- ret_value = SUCCEED;
- goto done;
- }
-
- /* Check to see if data aid exists? i.e. may need to create a ref for SDS */
- if(var->aid == FAIL && hdf_get_vp_aid(handle, var) == FAIL)
- {
- ret_value = FAIL;
- goto done;
- }
-
- /* inquire about element */
- ret_value = Hinquire(var->aid, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &special);
- if (ret_value != FAIL)
- { /* make sure it is chunked element */
- if (special == SPECIAL_CHUNKED)
- { /* get info about chunked element */
- if ((ret_value = HDget_special_info(var->aid, &info_block)) != FAIL)
- { /* Does user want chunk lengths back? */
- if (chunk_def != NULL)
- {
- /* we assume user has allocat space for chunk lengths */
- /* copy chunk lengths over */
- for (i = 0; i < info_block.ndims; i++)
- {
- chunk_def->chunk_lengths[i] = info_block.cdims[i];
- }
- }
- /* dont forget to free up info is special info block
- This space was allocated by the library */
- HDfree(info_block.cdims);
-
- /* Check to see if compressed.
- Currently we don't fill in the 'comp' structure
- because currently only the information about the
- compression type is available in get compression
- info code and not the parameters that went along. */
- switch(info_block.comp_type)
- {
- case COMP_CODE_NONE:
- *flags = HDF_CHUNK;
- break;
- case COMP_CODE_NBIT:
- *flags = (HDF_CHUNK | HDF_NBIT);
- break;
- default:
- *flags = (HDF_CHUNK | HDF_COMP);
- break;
- }
- }
- }
- else /* not special chunked element */
- {
- *flags = HDF_NONE; /* regular SDS */
- }
- }
-
- done:
- if (ret_value == FAIL)
- { /* Failure cleanup */
-
- }
- /* Normal cleanup */
-
-
- return ret_value;
-} /* SDgetchunkinfo_old() */
-
/******************************************************************************
NAME
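The removed SDgetchunkinfo_old above described the calling pattern that the surviving SDgetchunkinfo keeps: pass an optional HDF_CHUNK_DEF to receive the chunk lengths and read the bit-or'd flags to learn whether the SDS is non-chunked, chunked, or chunked and compressed. A short sketch under those assumptions, with sds_id obtained elsewhere:

/* Sketch: sds_id is assumed to refer to an existing SDS. */
#include <stdio.h>
#include "mfhdf.h"

void report_chunking(int32 sds_id)
{
    HDF_CHUNK_DEF cdef;
    int32 flags = HDF_NONE;

    if (SDgetchunkinfo(sds_id, &cdef, &flags) == FAIL)
        return;

    if (flags == HDF_NONE)
        printf("regular (non-chunked) SDS\n");
    else if (flags == HDF_CHUNK)
        printf("chunked, first chunk dimension = %d\n",
               (int)cdef.chunk_lengths[0]);
    else if (flags == (HDF_CHUNK | HDF_COMP))
        printf("chunked and compressed\n");
    else if (flags == (HDF_CHUNK | HDF_NBIT))
        printf("chunked with n-bit compression\n");
}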
@@ -6714,8 +6382,7 @@ SDgetchunkinfo(int32 sdsid, /* IN: sds access id */
handle = SDIhandle_from_id(sdsid, SDSTYPE);
if(handle == NULL || handle->file_type != HDF_FILE || handle->vars == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* Get variable from id */
@@ -6728,8 +6395,7 @@ SDgetchunkinfo(int32 sdsid, /* IN: sds access id */
if(var->data_ref == 0)
{
*flags = HDF_NONE; /* regular SDS */
- ret_value = SUCCEED;
- goto done;
+ HGOTO_DONE(SUCCEED);
}
#ifdef added_by_mistake
@@ -6740,8 +6406,7 @@ SDgetchunkinfo(int32 sdsid, /* IN: sds access id */
/* Check if data aid exists; if not, set up an access elt for reading */
if(var->aid == FAIL && hdf_get_vp_aid(handle, var) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
#endif
@@ -6956,8 +6621,7 @@ SDwritechunk(int32 sdsid, /* IN: access aid to SDS */
handle = SDIhandle_from_id(sdsid, SDSTYPE);
if(handle == NULL || handle->file_type != HDF_FILE || handle->vars == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
@@ -6965,15 +6629,13 @@ SDwritechunk(int32 sdsid, /* IN: access aid to SDS */
var = SDIget_var(handle, sdsid);
if(var == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* Check to see if data aid exists? i.e. may need to create a ref for SDS */
if(var->aid == FAIL && hdf_get_vp_aid(handle, var) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* Check compression method is enabled */
@@ -7017,8 +6679,7 @@ SDwritechunk(int32 sdsid, /* IN: access aid to SDS */
if (FAIL == (platntsubclass = DFKgetPNSC(var->HDFtype, DF_MT)))
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
if (DFKisnativeNT(var->HDFtype))
@@ -7026,7 +6687,7 @@ SDwritechunk(int32 sdsid, /* IN: access aid to SDS */
if (FAIL == (outntsubclass = DFKgetPNSC(var->HDFtype, DF_MT)))
{
ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
}
else
@@ -7047,7 +6708,7 @@ SDwritechunk(int32 sdsid, /* IN: access aid to SDS */
{
tBuf_size = 0;
ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_NOSPACE, FAIL);
} /* end if */
} /* end if */
@@ -7062,28 +6723,27 @@ SDwritechunk(int32 sdsid, /* IN: access aid to SDS */
if (FAIL == DFKconvert((VOIDP)datap, tBuf, var->HDFtype,
(byte_count/var->HDFsize), DFACC_WRITE, 0, 0))
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
/* write it out now */
- if ((ret_value = HMCwriteChunk(var->aid, origin, tBuf))
- != FAIL)
+ if ((ret_value = HMCwriteChunk(var->aid, origin, tBuf)) != FAIL)
{
- ret_value = SUCCEED;
+ HGOTO_DONE(SUCCEED);
}
-
- goto done; /* done */
+/* TODO: verify that this error handling is correct - remove when done */
+ else
+ HGOTO_ERROR(DFE_WRITEERROR, FAIL);
} /* end if */
else
{
- if ((ret_value = HMCwriteChunk(var->aid, origin, datap))
- != FAIL)
+ if ((ret_value = HMCwriteChunk(var->aid, origin, datap)) != FAIL)
{
- ret_value = SUCCEED;
+ HGOTO_DONE(SUCCEED);
}
-
- goto done; /* done */
+/* TODO: verify that this error handling is correct - remove when done */
+ else
+ HGOTO_ERROR(DFE_WRITEERROR, FAIL);
}
} /* end if get special info block */
}
@@ -7171,16 +6831,14 @@ SDreadchunk(int32 sdsid, /* IN: access aid to SDS */
handle = SDIhandle_from_id(sdsid, SDSTYPE);
if(handle == NULL || handle->file_type != HDF_FILE || handle->vars == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* get variable from id */
var = SDIget_var(handle, sdsid);
if(var == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* Dev note: empty SDS should have been checked here and SDreadchunk would
@@ -7195,8 +6853,7 @@ SDreadchunk(int32 sdsid, /* IN: access aid to SDS */
/* Check to see if data aid exists? i.e. may need to create a ref for SDS */
if(var->aid == FAIL && hdf_get_vp_aid(handle, var) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
#endif
@@ -7220,7 +6877,7 @@ SDreadchunk(int32 sdsid, /* IN: access aid to SDS */
{
var->aid = Hstartread(handle->hdf_file, var->data_tag, var->data_ref);
if(var->aid == FAIL) /* catch FAIL from Hstartread */
- HGOTO_ERROR(DFE_NOMATCH, FAIL);
+ HGOTO_ERROR(DFE_CANTACCESS, FAIL);
}
/* inquire about element */
@@ -7248,16 +6905,14 @@ SDreadchunk(int32 sdsid, /* IN: access aid to SDS */
if (FAIL == (platntsubclass = DFKgetPNSC(var->HDFtype, DF_MT)))
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
if (DFKisnativeNT(var->HDFtype))
{
if (FAIL == (outntsubclass = DFKgetPNSC(var->HDFtype, DF_MT)))
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
}
else
@@ -7277,8 +6932,7 @@ SDreadchunk(int32 sdsid, /* IN: access aid to SDS */
if(tBuf == NULL)
{
tBuf_size = 0;
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_NOSPACE, FAIL);
} /* end if */
} /* end if */
@@ -7297,14 +6951,13 @@ SDreadchunk(int32 sdsid, /* IN: access aid to SDS */
if (FAIL == DFKconvert(tBuf, datap, var->HDFtype,
(byte_count/var->HDFsize), DFACC_READ, 0, 0))
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_INTERNAL, FAIL);
}
ret_value = SUCCEED;
}
-
- goto done; /* done */
+ else
+ HGOTO_ERROR(DFE_READERROR, FAIL);
} /* end if */
else
{
@@ -7313,8 +6966,8 @@ SDreadchunk(int32 sdsid, /* IN: access aid to SDS */
{
ret_value = SUCCEED;
}
-
- goto done; /* done */
+ else
+ HGOTO_ERROR(DFE_READERROR, FAIL);
}
} /* end if get special info block */
}
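The two hunks above change the chunk write and read paths so that a failing HMCwriteChunk or HMCreadChunk now pushes DFE_WRITEERROR or DFE_READERROR instead of silently falling through to done. From the application side the entry points are SDwritechunk and SDreadchunk; the sketch below assumes a hypothetical 2-D DFNT_INT32 SDS that was already made chunked (2x3 chunks) with SDsetchunk.

/* Sketch: sds_id is assumed to be a 2-D DFNT_INT32 SDS already chunked into
   2x3 chunks via SDsetchunk; origin is given in chunk coordinates. */
#include "mfhdf.h"

intn roundtrip_first_chunk(int32 sds_id)
{
    int32 origin[2] = {0, 0};               /* first chunk in both dimensions */
    int32 wbuf[2][3] = {{1, 2, 3}, {4, 5, 6}};
    int32 rbuf[2][3];

    if (SDwritechunk(sds_id, origin, (VOIDP)wbuf) == FAIL)
        return FAIL;                         /* DFE_WRITEERROR is now recorded */

    if (SDreadchunk(sds_id, origin, (VOIDP)rbuf) == FAIL)
        return FAIL;                         /* DFE_READERROR likewise */

    return SUCCEED;
}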
@@ -7404,6 +7057,7 @@ SDsetchunkcache(int32 sdsid, /* IN: access aid to mess with */
int32 maxcache, /* IN: max number of chunks to cache */
int32 flags /* IN: flags = 0, HDF_CACHEALL */)
{
+ CONSTR(FUNC, "SDsetchunkcache"); /* for HGOTO_ERROR */
NC *handle = NULL; /* file handle */
NC_var *var = NULL; /* SDS variable */
int16 special; /* Special code */
@@ -7415,14 +7069,12 @@ SDsetchunkcache(int32 sdsid, /* IN: access aid to mess with */
/* Check args */
if (maxcache < 1 )
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
if (flags != 0 && flags != HDF_CACHEALL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* get file handle and verify it is an HDF file
@@ -7430,23 +7082,20 @@ SDsetchunkcache(int32 sdsid, /* IN: access aid to mess with */
handle = SDIhandle_from_id(sdsid, SDSTYPE);
if(handle == NULL || handle->file_type != HDF_FILE || handle->vars == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* get variable from id */
var = SDIget_var(handle, sdsid);
if(var == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* Check to see if data aid exists? i.e. may need to create a ref for SDS */
if(var->aid == FAIL && hdf_get_vp_aid(handle, var) == FAIL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* inquire about element */
@@ -7513,16 +7162,14 @@ SDcheckempty(int32 sdsid, /* IN: dataset ID */
handle = SDIhandle_from_id(sdsid, SDSTYPE);
if(handle == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* get the variable */
var = SDIget_var(handle, sdsid);
if(var == NULL)
{
- ret_value = FAIL;
- goto done;
+ HGOTO_ERROR(DFE_ARGS, FAIL);
}
/* assume that the SDS is not empty until proving otherwise */
@@ -7586,6 +7233,7 @@ done:
hdf_idtype_t
SDidtype(int32 an_id)
{
+ CONSTR(FUNC, "SDidtype"); /* for HGOTO_ERROR */
NC *handle = NULL; /* file record struct */
hdf_idtype_t ret_value = NOT_SDAPI_ID;
@@ -7662,9 +7310,8 @@ SDreset_maxopenfiles(intn req_max)
/* Reset the max NC open and re-allocate cdf list appropriately */
ret_value = NC_reset_maxopenfiles(req_max);
- if (ret_value == 0)
- /* no successful allocation */
- HGOTO_ERROR(DFE_NOSPACE, FAIL); /* must change DFE_NOSPACE to something else, if the other case of returning 0 exists??? */
+ if (ret_value == -1)
+ HGOTO_ERROR(DFE_INTERNAL, FAIL); /* should propagate error code */
done:
if (ret_value == FAIL)
@@ -7751,6 +7398,7 @@ done:
intn
SDget_numopenfiles()
{
+ CONSTR(FUNC, "SDget_numopenfiles"); /* for HGOTO_ERROR */
intn ret_value = SUCCEED;
#ifdef SDDEBUG
diff --git a/mfhdf/libsrc/string.c b/mfhdf/libsrc/string.c
index 223596b..28030fb 100644
--- a/mfhdf/libsrc/string.c
+++ b/mfhdf/libsrc/string.c
@@ -14,7 +14,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-/* $Id: string.c 4963 2007-09-15 17:20:52Z bmribler $ */
+/* $Id: string.c 6431 2016-06-14 13:48:03Z bmribler $ */
#include <string.h>
#include "local_nc.h"
@@ -169,7 +169,7 @@ xdr_NC_string(xdrs, spp)
XDR *xdrs;
NC_string **spp;
{
- u_long count ;
+ u_long count = 0;
int status ;
switch (xdrs->x_op) {
diff --git a/mfhdf/libsrc/var.c b/mfhdf/libsrc/var.c
index b23d19b..e0cd7aa 100644
--- a/mfhdf/libsrc/var.c
+++ b/mfhdf/libsrc/var.c
@@ -14,7 +14,7 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-/* $Id: var.c 5981 2013-12-16 16:27:16Z bmribler $ */
+/* $Id: var.c 6431 2016-06-14 13:48:03Z bmribler $ */
#include <string.h>
#include "local_nc.h"
@@ -838,7 +838,7 @@ xdr_NC_var(xdrs, vpp)
XDR *xdrs;
NC_var **vpp;
{
- u_long begin ;
+ u_long begin = 0;
if( xdrs->x_op == XDR_FREE)
{
@@ -863,6 +863,10 @@ xdr_NC_var(xdrs, vpp)
if( !xdr_NC_array(xdrs, &((*vpp)->attrs)))
return(FALSE) ;
+ /* This USE_ENUM may not be necessary after xdr and code cleanup.
+ See HDFFR-1318, HDFFR-1327, and other Mac/XDR issues for details.
+ I had tried and xdr_enum worked consistently even though there were
+ failures in other places. -BMR, 6/14/2016 */
#ifdef USE_ENUM
if (! xdr_enum(xdrs, (enum_t *)&((*vpp)->type)) ) {
return (FALSE);
diff --git a/mfhdf/ncdump/CMakeLists.txt b/mfhdf/ncdump/CMakeLists.txt
index 2cfd98e..fa395ae 100644
--- a/mfhdf/ncdump/CMakeLists.txt
+++ b/mfhdf/ncdump/CMakeLists.txt
@@ -1,5 +1,5 @@
-cmake_minimum_required (VERSION 2.8.10)
-PROJECT (HDF4_MFHDF_NCDUMP)
+cmake_minimum_required (VERSION 3.1.0)
+project (HDF4_MFHDF_NCDUMP)
INCLUDE_DIRECTORIES (${HDF4_HDFSOURCE_DIR})
INCLUDE_DIRECTORIES (${HDF4_MFHDFSOURCE_DIR})
@@ -23,15 +23,15 @@ set (ncdump_SRCS
${HDF4_MFHDF_NCDUMP_SOURCE_DIR}/vardata.c
${HDF4_SOURCE_DIR}/mfhdf/util/getopt.c
)
-
-ADD_EXECUTABLE (ncdump ${ncdump_SRCS})
-TARGET_C_PROPERTIES (ncdump " " " ")
+
+add_executable (ncdump ${ncdump_SRCS})
+TARGET_C_PROPERTIES (ncdump STATIC " " " ")
if (HDF4_BUILD_XDR_LIB)
target_link_libraries (ncdump ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS} ${HDF4_MF_XDR_LIB_TARGET} )
else (HDF4_BUILD_XDR_LIB)
target_link_libraries (ncdump ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
endif (HDF4_BUILD_XDR_LIB)
-TARGET_NAMING (ncdump ${LIB_TYPE})
+TARGET_NAMING (ncdump STATIC)
if (BUILD_TESTING)
include (CMakeTests.cmake)
@@ -52,10 +52,7 @@ INSTALL_PROGRAM_PDB (ncdump ${HDF4_INSTALL_TOOLS_BIN_DIR} toolsapplications)
INSTALL (
TARGETS
ncdump
- RUNTIME DESTINATION
- ${HDF4_INSTALL_TOOLS_BIN_DIR}
- COMPONENT
- toolsapplications
+ RUNTIME DESTINATION ${HDF4_INSTALL_TOOLS_BIN_DIR} COMPONENT toolsapplications
)
diff --git a/mfhdf/ncdump/Makefile.in b/mfhdf/ncdump/Makefile.in
index d8aed14..7f15fe8 100644
--- a/mfhdf/ncdump/Makefile.in
+++ b/mfhdf/ncdump/Makefile.in
@@ -90,7 +90,20 @@ bin_PROGRAMS = ncdump$(EXEEXT)
TESTS = $(TEST_SCRIPT)
subdir = mfhdf/ncdump
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
-am__aclocal_m4_deps = $(top_srcdir)/configure.ac
+am__aclocal_m4_deps = $(top_srcdir)/m4/ax_check_class.m4 \
+ $(top_srcdir)/m4/ax_check_classpath.m4 \
+ $(top_srcdir)/m4/ax_check_java_home.m4 \
+ $(top_srcdir)/m4/ax_check_junit.m4 \
+ $(top_srcdir)/m4/ax_java_options.m4 \
+ $(top_srcdir)/m4/ax_jni_include_dir.m4 \
+ $(top_srcdir)/m4/ax_prog_jar.m4 \
+ $(top_srcdir)/m4/ax_prog_java.m4 \
+ $(top_srcdir)/m4/ax_prog_java_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javac.m4 \
+ $(top_srcdir)/m4/ax_prog_javac_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javadoc.m4 \
+ $(top_srcdir)/m4/ax_try_compile_java.m4 \
+ $(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
mkinstalldirs = $(SHELL) $(top_srcdir)/bin/mkinstalldirs
@@ -415,12 +428,27 @@ FFLAGS = @FFLAGS@
FGREP = @FGREP@
FLIBS = @FLIBS@
GREP = @GREP@
+H4_CLASSPATH = @H4_CLASSPATH@
+H4_JAVACFLAGS = @H4_JAVACFLAGS@
+H4_JAVAFLAGS = @H4_JAVAFLAGS@
H4_VERSION = @H4_VERSION@
+HDF_JAVA = @HDF_JAVA@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
INSTALL_PROGRAM = @INSTALL_PROGRAM@
INSTALL_SCRIPT = @INSTALL_SCRIPT@
INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+JAR = @JAR@
+JAVA = @JAVA@
+JAVAC = @JAVAC@
+JAVACFLAGS = @JAVACFLAGS@
+JAVADOC = @JAVADOC@
+JAVAFLAGS = @JAVAFLAGS@
+JAVAPREFIX = @JAVAPREFIX@
+JAVA_JUNIT = @JAVA_JUNIT@
+JAVA_PATH_NAME = @JAVA_PATH_NAME@
+JNIFLAGS = @JNIFLAGS@
+JUNIT = @JUNIT@
LD = @LD@
LDFLAGS = @LDFLAGS@
LEX = @LEX@
@@ -463,11 +491,14 @@ STRIP = @STRIP@
SZIP_HAS_ENCODER = @SZIP_HAS_ENCODER@
SZIP_INFO = @SZIP_INFO@
TBL = @TBL@
+TESTS_JUNIT = @TESTS_JUNIT@
TEST_FORTRAN_NETCDF = @TEST_FORTRAN_NETCDF@
UNAME_INFO = @UNAME_INFO@
USE_COMP_SZIP = @USE_COMP_SZIP@
+UUDECODE = @UUDECODE@
VERSION = @VERSION@
YACC = @YACC@
+_ACJNI_JAVAC = @_ACJNI_JAVAC@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/mfhdf/ncgen/CMakeLists.txt b/mfhdf/ncgen/CMakeLists.txt
index 7fc3a79..a24d0b0 100644
--- a/mfhdf/ncgen/CMakeLists.txt
+++ b/mfhdf/ncgen/CMakeLists.txt
@@ -1,5 +1,5 @@
-cmake_minimum_required (VERSION 2.8.10)
-PROJECT (HDF4_MFHDF_NCGEN)
+cmake_minimum_required (VERSION 3.1.0)
+project (HDF4_MFHDF_NCGEN)
INCLUDE_DIRECTORIES (
${HDF4_HDFSOURCE_DIR}
@@ -36,15 +36,15 @@ set (ncgen_SRCS
if (WIN32)
set (ncgen_SRCS ${ncgen_SRCS} ${HDF4_SOURCE_DIR}/mfhdf/util/getopt.c)
endif (WIN32)
-
-ADD_EXECUTABLE (ncgen ${ncgen_SRCS})
-TARGET_C_PROPERTIES (ncgen " " " ")
+
+add_executable (ncgen ${ncgen_SRCS})
+TARGET_C_PROPERTIES (ncgen STATIC " " " ")
if (HDF4_BUILD_XDR_LIB)
target_link_libraries (ncgen ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS} ${HDF4_MF_XDR_LIB_TARGET} )
else (HDF4_BUILD_XDR_LIB)
target_link_libraries (ncgen ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
endif (HDF4_BUILD_XDR_LIB)
-TARGET_NAMING (ncgen ${LIB_TYPE})
+TARGET_NAMING (ncgen STATIC)
if (BUILD_TESTING)
include (CMakeTests.cmake)
@@ -64,8 +64,5 @@ INSTALL_PROGRAM_PDB (ncgen ${HDF4_INSTALL_TOOLS_BIN_DIR} toolsapplications)
INSTALL (
TARGETS
ncgen
- RUNTIME DESTINATION
- ${HDF4_INSTALL_TOOLS_BIN_DIR}
- COMPONENT
- toolsapplications
+ RUNTIME DESTINATION ${HDF4_INSTALL_TOOLS_BIN_DIR} COMPONENT toolsapplications
)
diff --git a/mfhdf/ncgen/Makefile.in b/mfhdf/ncgen/Makefile.in
index 9ec64eb..fd8e350 100644
--- a/mfhdf/ncgen/Makefile.in
+++ b/mfhdf/ncgen/Makefile.in
@@ -96,7 +96,20 @@ check_PROGRAMS = ctest0$(EXEEXT) $(am__EXEEXT_1)
TESTS = $(TEST_SCRIPT)
subdir = mfhdf/ncgen
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
-am__aclocal_m4_deps = $(top_srcdir)/configure.ac
+am__aclocal_m4_deps = $(top_srcdir)/m4/ax_check_class.m4 \
+ $(top_srcdir)/m4/ax_check_classpath.m4 \
+ $(top_srcdir)/m4/ax_check_java_home.m4 \
+ $(top_srcdir)/m4/ax_check_junit.m4 \
+ $(top_srcdir)/m4/ax_java_options.m4 \
+ $(top_srcdir)/m4/ax_jni_include_dir.m4 \
+ $(top_srcdir)/m4/ax_prog_jar.m4 \
+ $(top_srcdir)/m4/ax_prog_java.m4 \
+ $(top_srcdir)/m4/ax_prog_java_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javac.m4 \
+ $(top_srcdir)/m4/ax_prog_javac_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javadoc.m4 \
+ $(top_srcdir)/m4/ax_try_compile_java.m4 \
+ $(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
mkinstalldirs = $(SHELL) $(top_srcdir)/bin/mkinstalldirs
@@ -430,12 +443,27 @@ FFLAGS = @FFLAGS@
FGREP = @FGREP@
FLIBS = @FLIBS@
GREP = @GREP@
+H4_CLASSPATH = @H4_CLASSPATH@
+H4_JAVACFLAGS = @H4_JAVACFLAGS@
+H4_JAVAFLAGS = @H4_JAVAFLAGS@
H4_VERSION = @H4_VERSION@
+HDF_JAVA = @HDF_JAVA@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
INSTALL_PROGRAM = @INSTALL_PROGRAM@
INSTALL_SCRIPT = @INSTALL_SCRIPT@
INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+JAR = @JAR@
+JAVA = @JAVA@
+JAVAC = @JAVAC@
+JAVACFLAGS = @JAVACFLAGS@
+JAVADOC = @JAVADOC@
+JAVAFLAGS = @JAVAFLAGS@
+JAVAPREFIX = @JAVAPREFIX@
+JAVA_JUNIT = @JAVA_JUNIT@
+JAVA_PATH_NAME = @JAVA_PATH_NAME@
+JNIFLAGS = @JNIFLAGS@
+JUNIT = @JUNIT@
LD = @LD@
LDFLAGS = @LDFLAGS@
LEX = @LEX@
@@ -478,11 +506,14 @@ STRIP = @STRIP@
SZIP_HAS_ENCODER = @SZIP_HAS_ENCODER@
SZIP_INFO = @SZIP_INFO@
TBL = @TBL@
+TESTS_JUNIT = @TESTS_JUNIT@
TEST_FORTRAN_NETCDF = @TEST_FORTRAN_NETCDF@
UNAME_INFO = @UNAME_INFO@
USE_COMP_SZIP = @USE_COMP_SZIP@
+UUDECODE = @UUDECODE@
VERSION = @VERSION@
YACC = @YACC@
+_ACJNI_JAVAC = @_ACJNI_JAVAC@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/mfhdf/nctest/CMakeLists.txt b/mfhdf/nctest/CMakeLists.txt
index 261e9e5..bfbc3da 100644
--- a/mfhdf/nctest/CMakeLists.txt
+++ b/mfhdf/nctest/CMakeLists.txt
@@ -1,5 +1,5 @@
-cmake_minimum_required (VERSION 2.8.10)
-PROJECT (HDF4_MFHDF_NCTEST)
+cmake_minimum_required (VERSION 3.1.0)
+project (HDF4_MFHDF_NCTEST)
INCLUDE_DIRECTORIES (${HDF4_HDFSOURCE_DIR})
INCLUDE_DIRECTORIES (${HDF4_MFHDFSOURCE_DIR})
@@ -40,14 +40,14 @@ set (nctest_SRCS
${HDF4_MFHDF_NCTEST_SOURCE_DIR}/vputgetg.c
)
-ADD_EXECUTABLE (nctest ${nctest_SRCS})
-TARGET_C_PROPERTIES (nctest " " " ")
+add_executable (nctest ${nctest_SRCS})
+TARGET_C_PROPERTIES (nctest STATIC " " " ")
if (HDF4_BUILD_XDR_LIB)
target_link_libraries (nctest ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS} ${HDF4_MF_XDR_LIB_TARGET} )
else (HDF4_BUILD_XDR_LIB)
target_link_libraries (nctest ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
endif (HDF4_BUILD_XDR_LIB)
-TARGET_NAMING (nctest ${LIB_TYPE})
+TARGET_NAMING (nctest STATIC)
set (HDF4_NC_TEST_FILES
test_unlim.cdl
@@ -57,7 +57,7 @@ set (HDF4_NC_TEST_FILES
foreach (h4_file ${HDF4_NC_TEST_FILES})
set (dest "${PROJECT_BINARY_DIR}/${h4_file}")
#message (STATUS " Copying ${HDF4_MFHDF_NCTEST_SOURCE_DIR}/${h4_file} to ${PROJECT_BINARY_DIR}/")
- ADD_CUSTOM_COMMAND (
+ add_custom_command (
TARGET nctest
POST_BUILD
COMMAND ${CMAKE_COMMAND}
diff --git a/mfhdf/nctest/Makefile.in b/mfhdf/nctest/Makefile.in
index 4e60508..f5573b6 100644
--- a/mfhdf/nctest/Makefile.in
+++ b/mfhdf/nctest/Makefile.in
@@ -88,7 +88,20 @@ check_PROGRAMS = nctest$(EXEEXT)
TESTS = $(am__EXEEXT_1)
subdir = mfhdf/nctest
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
-am__aclocal_m4_deps = $(top_srcdir)/configure.ac
+am__aclocal_m4_deps = $(top_srcdir)/m4/ax_check_class.m4 \
+ $(top_srcdir)/m4/ax_check_classpath.m4 \
+ $(top_srcdir)/m4/ax_check_java_home.m4 \
+ $(top_srcdir)/m4/ax_check_junit.m4 \
+ $(top_srcdir)/m4/ax_java_options.m4 \
+ $(top_srcdir)/m4/ax_jni_include_dir.m4 \
+ $(top_srcdir)/m4/ax_prog_jar.m4 \
+ $(top_srcdir)/m4/ax_prog_java.m4 \
+ $(top_srcdir)/m4/ax_prog_java_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javac.m4 \
+ $(top_srcdir)/m4/ax_prog_javac_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javadoc.m4 \
+ $(top_srcdir)/m4/ax_try_compile_java.m4 \
+ $(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
mkinstalldirs = $(SHELL) $(top_srcdir)/bin/mkinstalldirs
@@ -413,12 +426,27 @@ FFLAGS = @FFLAGS@
FGREP = @FGREP@
FLIBS = @FLIBS@
GREP = @GREP@
+H4_CLASSPATH = @H4_CLASSPATH@
+H4_JAVACFLAGS = @H4_JAVACFLAGS@
+H4_JAVAFLAGS = @H4_JAVAFLAGS@
H4_VERSION = @H4_VERSION@
+HDF_JAVA = @HDF_JAVA@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
INSTALL_PROGRAM = @INSTALL_PROGRAM@
INSTALL_SCRIPT = @INSTALL_SCRIPT@
INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+JAR = @JAR@
+JAVA = @JAVA@
+JAVAC = @JAVAC@
+JAVACFLAGS = @JAVACFLAGS@
+JAVADOC = @JAVADOC@
+JAVAFLAGS = @JAVAFLAGS@
+JAVAPREFIX = @JAVAPREFIX@
+JAVA_JUNIT = @JAVA_JUNIT@
+JAVA_PATH_NAME = @JAVA_PATH_NAME@
+JNIFLAGS = @JNIFLAGS@
+JUNIT = @JUNIT@
LD = @LD@
LDFLAGS = @LDFLAGS@
LEX = @LEX@
@@ -461,11 +489,14 @@ STRIP = @STRIP@
SZIP_HAS_ENCODER = @SZIP_HAS_ENCODER@
SZIP_INFO = @SZIP_INFO@
TBL = @TBL@
+TESTS_JUNIT = @TESTS_JUNIT@
TEST_FORTRAN_NETCDF = @TEST_FORTRAN_NETCDF@
UNAME_INFO = @UNAME_INFO@
USE_COMP_SZIP = @USE_COMP_SZIP@
+UUDECODE = @UUDECODE@
VERSION = @VERSION@
YACC = @YACC@
+_ACJNI_JAVAC = @_ACJNI_JAVAC@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/mfhdf/test/CMakeLists.txt b/mfhdf/test/CMakeLists.txt
index 96972ca..f715a1f 100644
--- a/mfhdf/test/CMakeLists.txt
+++ b/mfhdf/test/CMakeLists.txt
@@ -1,5 +1,5 @@
-cmake_minimum_required (VERSION 2.8.10)
-PROJECT (HDF4_MFHDF_TEST)
+cmake_minimum_required (VERSION 3.1.0)
+project (HDF4_MFHDF_TEST)
INCLUDE_DIRECTORIES (${HDF4_HDFSOURCE_DIR})
INCLUDE_DIRECTORIES (${HDF4_MFHDFSOURCE_DIR})
@@ -35,62 +35,53 @@ set (hdftest_SRCS
${HDF4_MFHDF_TEST_SOURCE_DIR}/tattdatainfo.c
${HDF4_MFHDF_TEST_SOURCE_DIR}/tdatainfo.c
${HDF4_MFHDF_TEST_SOURCE_DIR}/tdatasizes.c
+ ${HDF4_MFHDF_TEST_SOURCE_DIR}/texternal.c
${HDF4_MFHDF_TEST_SOURCE_DIR}/tutils.c
)
#-- Adding test for hdftest
add_executable (hdftest ${hdftest_SRCS})
-TARGET_NAMING (hdftest ${LIB_TYPE})
-TARGET_C_PROPERTIES (hdftest " " " ")
+TARGET_NAMING (hdftest STATIC)
+TARGET_C_PROPERTIES (hdftest STATIC " " " ")
if (HDF4_BUILD_XDR_LIB)
target_link_libraries (hdftest ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${HDF4_MF_XDR_LIB_TARGET} ${LINK_LIBS})
else (HDF4_BUILD_XDR_LIB)
target_link_libraries (hdftest ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
endif (HDF4_BUILD_XDR_LIB)
-
-#-- Copy all the dat files from the test directory into the source directory
-set (HDF4_REFERENCE_TEST_FILES
- sds_szipped.dat
- smallslice.0000.nc
- test1.nc
-)
-
-foreach (h4_file ${HDF4_REFERENCE_TEST_FILES})
- set (dest "${PROJECT_BINARY_DIR}/${h4_file}")
- #MESSAGE(STATUS " Copying ${HDF4_MFHDF_TEST_DIR}/${h4_file} to ${PROJECT_BINARY_DIR}/")
- add_custom_command (
- TARGET hdftest
- POST_BUILD
- COMMAND ${CMAKE_COMMAND}
- ARGS -E copy_if_different ${HDF4_MFHDF_TEST_DIR}/${h4_file} ${dest}
- )
-endforeach (h4_file ${HDF4_REFERENCE_TEST_FILES})
+set_target_properties (hdftest PROPERTIES FOLDER test)
+if (BUILD_SHARED_LIBS)
+ add_executable (hdftest-shared ${hdftest_SRCS})
+ TARGET_NAMING (hdftest-shared SHARED)
+ TARGET_C_PROPERTIES (hdftest-shared SHARED " " " ")
+ if (HDF4_BUILD_XDR_LIB)
+ target_link_libraries (hdftest-shared ${HDF4_MF_LIBSH_TARGET} ${HDF4_SRC_LIBSH_TARGET} ${HDF4_MF_XDR_LIBSH_TARGET} ${LINK_SHARED_LIBS})
+ else (HDF4_BUILD_XDR_LIB)
+ target_link_libraries (hdftest-shared ${HDF4_MF_LIBSH_TARGET} ${HDF4_SRC_LIBSH_TARGET} ${LINK_SHARED_LIBS})
+ endif (HDF4_BUILD_XDR_LIB)
+ set_target_properties (hdftest-shared PROPERTIES FOLDER test)
+endif (BUILD_SHARED_LIBS)
#-- Adding test for cdftest
add_executable (cdftest ${HDF4_MFHDF_TEST_SOURCE_DIR}/cdftest.c)
-TARGET_NAMING (cdftest ${LIB_TYPE})
-TARGET_C_PROPERTIES (cdftest " " " ")
+TARGET_NAMING (cdftest STATIC)
+TARGET_C_PROPERTIES (cdftest STATIC " " " ")
if (HDF4_BUILD_XDR_LIB)
- target_link_libraries (cdftest ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS} ${HDF4_MF_XDR_LIB_TARGET})
+ target_link_libraries (cdftest ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${HDF4_MF_XDR_LIB_TARGET} ${LINK_LIBS})
else (HDF4_BUILD_XDR_LIB)
target_link_libraries (cdftest ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
endif (HDF4_BUILD_XDR_LIB)
-
-#-- Copy all the dat files from the test directory into the source directory
-set (HDF4_REFERENCE2_TEST_FILES
- testout.sav
-)
-
-foreach (h4_file ${HDF4_REFERENCE2_TEST_FILES})
- set (dest "${PROJECT_BINARY_DIR}/${h4_file}")
- #MESSAGE(STATUS " Copying ${HDF4_MFHDF_TEST_DIR}/${h4_file} to ${PROJECT_BINARY_DIR}/")
- add_custom_command (
- TARGET cdftest
- POST_BUILD
- COMMAND ${CMAKE_COMMAND}
- ARGS -E copy_if_different ${HDF4_MFHDF_TEST_DIR}/${h4_file} ${dest}
- )
-endforeach (h4_file ${HDF4_REFERENCE2_TEST_FILES})
+set_target_properties (cdftest PROPERTIES FOLDER test)
+if (BUILD_SHARED_LIBS)
+ add_executable (cdftest-shared ${HDF4_MFHDF_TEST_SOURCE_DIR}/cdftest.c)
+ TARGET_NAMING (cdftest-shared SHARED)
+ TARGET_C_PROPERTIES (cdftest-shared SHARED " " " ")
+ if (HDF4_BUILD_XDR_LIB)
+ target_link_libraries (cdftest-shared ${HDF4_MF_LIBSH_TARGET} ${HDF4_SRC_LIBSH_TARGET} ${HDF4_MF_XDR_LIBSH_TARGET} ${LINK_SHARED_LIBS})
+ else (HDF4_BUILD_XDR_LIB)
+ target_link_libraries (cdftest-shared ${HDF4_MF_LIBSH_TARGET} ${HDF4_SRC_LIBSH_TARGET} ${LINK_SHARED_LIBS})
+ endif (HDF4_BUILD_XDR_LIB)
+ set_target_properties (cdftest-shared PROPERTIES FOLDER test)
+endif (BUILD_SHARED_LIBS)
#-- Adding test for hdfnctest
set (hdfnctest_SRCS
@@ -101,12 +92,24 @@ set (hdfnctest_SRCS
)
add_executable (hdfnctest ${hdfnctest_SRCS})
-TARGET_NAMING (hdfnctest ${LIB_TYPE})
-TARGET_C_PROPERTIES (hdfnctest " " " ")
+TARGET_NAMING (hdfnctest STATIC)
+TARGET_C_PROPERTIES (hdfnctest STATIC " " " ")
if (HDF4_BUILD_XDR_LIB)
- target_link_libraries (hdfnctest ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS} ${HDF4_MF_XDR_LIB_TARGET})
+ target_link_libraries (hdfnctest ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${HDF4_MF_XDR_LIB_TARGET} ${LINK_LIBS})
else (HDF4_BUILD_XDR_LIB)
target_link_libraries (hdfnctest ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS})
endif (HDF4_BUILD_XDR_LIB)
+set_target_properties (hdfnctest PROPERTIES FOLDER test)
+if (BUILD_SHARED_LIBS)
+ add_executable (hdfnctest-shared ${hdfnctest_SRCS})
+ TARGET_NAMING (hdfnctest-shared SHARED)
+ TARGET_C_PROPERTIES (hdfnctest-shared SHARED " " " ")
+ if (HDF4_BUILD_XDR_LIB)
+ target_link_libraries (hdfnctest-shared ${HDF4_MF_LIBSH_TARGET} ${HDF4_SRC_LIBSH_TARGET} ${HDF4_MF_XDR_LIBSH_TARGET} ${LINK_SHARED_LIBS})
+ else (HDF4_BUILD_XDR_LIB)
+ target_link_libraries (hdfnctest-shared ${HDF4_MF_LIBSH_TARGET} ${HDF4_SRC_LIBSH_TARGET} ${LINK_SHARED_LIBS})
+ endif (HDF4_BUILD_XDR_LIB)
+ set_target_properties (hdfnctest-shared PROPERTIES FOLDER test)
+endif (BUILD_SHARED_LIBS)
include (CMakeTests.cmake)
diff --git a/mfhdf/test/CMakeTests.cmake b/mfhdf/test/CMakeTests.cmake
index bf368bb..0bd9ccd 100644
--- a/mfhdf/test/CMakeTests.cmake
+++ b/mfhdf/test/CMakeTests.cmake
@@ -4,6 +4,62 @@
### T E S T I N G ###
##############################################################################
##############################################################################
+file (MAKE_DIRECTORY "${PROJECT_BINARY_DIR}/TEST")
+if (BUILD_SHARED_LIBS)
+ file (MAKE_DIRECTORY "${PROJECT_BINARY_DIR}/TEST-shared")
+endif (BUILD_SHARED_LIBS)
+
+#-- Copy all the dat files from the test directory into the source directory
+set (HDF4_REFERENCE_TEST_FILES
+ sds_szipped.dat
+ smallslice.0000.nc
+ test1.nc
+ Roy.nc
+ Roy-64.nc
+)
+
+foreach (h4_file ${HDF4_REFERENCE_TEST_FILES})
+ set (dest "${PROJECT_BINARY_DIR}/TEST/${h4_file}")
+ add_custom_command (
+ TARGET hdftest
+ POST_BUILD
+ COMMAND ${CMAKE_COMMAND}
+ ARGS -E copy_if_different ${HDF4_MFHDF_TEST_DIR}/${h4_file} ${dest}
+ )
+ if (BUILD_SHARED_LIBS)
+ set (dest "${PROJECT_BINARY_DIR}/TEST-shared/${h4_file}")
+ add_custom_command (
+ TARGET hdftest-shared
+ POST_BUILD
+ COMMAND ${CMAKE_COMMAND}
+ ARGS -E copy_if_different ${HDF4_MFHDF_TEST_DIR}/${h4_file} ${dest}
+ )
+ endif (BUILD_SHARED_LIBS)
+endforeach (h4_file ${HDF4_REFERENCE_TEST_FILES})
+
+#-- Copy all the dat files from the test directory into the source directory
+set (HDF4_REFERENCE2_TEST_FILES
+ testout.sav
+)
+
+foreach (h4_file ${HDF4_REFERENCE2_TEST_FILES})
+ set (dest "${PROJECT_BINARY_DIR}/TEST/${h4_file}")
+ add_custom_command (
+ TARGET cdftest
+ POST_BUILD
+ COMMAND ${CMAKE_COMMAND}
+ ARGS -E copy_if_different ${HDF4_MFHDF_TEST_DIR}/${h4_file} ${dest}
+ )
+ if (BUILD_SHARED_LIBS)
+ set (dest "${PROJECT_BINARY_DIR}/TEST-shared/${h4_file}")
+ add_custom_command (
+ TARGET cdftest-shared
+ POST_BUILD
+ COMMAND ${CMAKE_COMMAND}
+ ARGS -E copy_if_different ${HDF4_MFHDF_TEST_DIR}/${h4_file} ${dest}
+ )
+ endif (BUILD_SHARED_LIBS)
+endforeach (h4_file ${HDF4_REFERENCE2_TEST_FILES})
##############################################################################
##############################################################################
@@ -11,124 +67,183 @@
##############################################################################
##############################################################################
# Remove any output file left over from previous test run
+set (HDF4_TESTMFHDF_FILES
+ b150.hdf
+ bug376.hdf
+ cdfout.new
+ cdfout.new.err
+ chkbit.hdf
+ chktst.hdf
+ comptst1.hdf
+ comptst2.hdf
+ comptst3.hdf
+ comptst4.hdf
+ comptst5.hdf
+ comptst6.hdf
+ comptst7.hdf
+ datainfo_chk.hdf
+ datainfo_chkcmp.hdf
+ datainfo_cmp.hdf
+ datainfo_extend.hdf
+ datainfo_nodata.hdf
+ datainfo_simple.hdf
+ datasizes.hdf
+ dim.hdf
+ emptySDSs.hdf
+ extfile.hdf
+ exttst.hdf
+ idtypes.hdf
+ multidimvar.nc
+ nbit.hdf
+ onedimmultivars.nc
+ onedimonevar.nc
+ scaletst.hdf
+ sds1_dim1_samename.hdf
+ sds2_dim1_samename.hdf
+ SDS_8_sziped.hdf
+ SDS_16_sziped.hdf
+ SDS_32_sziped.hdf
+ sds_compressed.hdf
+ SD_externals
+ SDS_fl32_sziped.hdf
+ SDS_fl64_sziped.hdf
+ sds_szipped.hdf
+ SDSchunkedsziped.hdf
+ SDSchunkedsziped3d.hdf
+ SDSlongname.hdf
+ SDSunlimitedsziped.hdf
+ test.cdf
+ test1.hdf
+ test2.hdf
+ test_arguments.hdf
+ 'This file name has quite a few characters because it is used to test the fix of bugzilla 1331. It has to be at least this long to see.'
+ Unlim_dim.hdf
+ Unlim_inloop.hdf
+ vars_samename.hdf
+ tdfanndg.hdf
+ tdfansdg.hdf
+)
add_test (
NAME MFHDF_TEST-clearall-objects
COMMAND ${CMAKE_COMMAND}
- -E remove
- b150.hdf
- bug376.hdf
- cdfout.new
- cdfout.new.err
- chkbit.hdf
- chktst.hdf
- comptst1.hdf
- comptst2.hdf
- comptst3.hdf
- comptst4.hdf
- comptst5.hdf
- comptst6.hdf
- comptst7.hdf
- datainfo_chk.hdf
- datainfo_chkcmp.hdf
- datainfo_cmp.hdf
- datainfo_extend.hdf
- datainfo_nodata.hdf
- datainfo_simple.hdf
- datasizes.hdf
- dim.hdf
- emptySDSs.hdf
- extfile.hdf
- exttst.hdf
- idtypes.hdf
- multidimvar.nc
- nbit.hdf
- onedimmultivars.nc
- onedimonevar.nc
- scaletst.hdf
- sds1_dim1_samename.hdf
- sds2_dim1_samename.hdf
- SDS_8_sziped.hdf
- SDS_16_sziped.hdf
- SDS_32_sziped.hdf
- sds_compressed.hdf
- SD_externals
- SDS_fl32_sziped.hdf
- SDS_fl64_sziped.hdf
- sds_szipped.hdf
- SDSchunkedsziped.hdf
- SDSchunkedsziped3d.hdf
- SDSlongname.hdf
- SDSunlimitedsziped.hdf
- test.cdf
- test1.hdf
- test2.hdf
- test_arguments.hdf
- 'This file name has quite a few characters because it is used to test the fix of bugzilla 1331. It has to be at least this long to see.'
- Unlim_dim.hdf
- Unlim_inloop.hdf
- vars_samename.hdf
- tdfanndg.hdf
- tdfansdg.hdf
+ -E remove
+ ${HDF4_TESTMFHDF_FILES}
+ WORKING_DIRECTORY
+ ${PROJECT_BINARY_DIR}/TEST
)
-add_test (NAME hdftest COMMAND $<TARGET_FILE:hdftest>)
+add_test (NAME MFHDF_TEST-hdftest COMMAND $<TARGET_FILE:hdftest>)
set (passRegex "HDF-SD test passes")
-set_property (TEST hdftest PROPERTY PASS_REGULAR_EXPRESSION "${passRegex}")
-set_tests_properties (hdftest PROPERTIES DEPENDS MFHDF_TEST-clearall-objects LABELS ${PROJECT_NAME})
+set_tests_properties (MFHDF_TEST-hdftest PROPERTIES
+ PASS_REGULAR_EXPRESSION "${passRegex}"
+ DEPENDS MFHDF_TEST-clearall-objects
+ WORKING_DIRECTORY ${PROJECT_BINARY_DIR}/TEST
+ LABELS ${PROJECT_NAME}
+)
-add_test (NAME cdftest COMMAND "${CMAKE_COMMAND}"
+add_test (NAME MFHDF_TEST-cdftest COMMAND "${CMAKE_COMMAND}"
-D "TEST_PROGRAM=$<TARGET_FILE:cdftest>"
-D "TEST_ARGS:STRING="
- -D "TEST_FOLDER=${PROJECT_BINARY_DIR}"
+ -D "TEST_FOLDER=${PROJECT_BINARY_DIR}/TEST"
-D "TEST_OUTPUT=cdfout.new"
-D "TEST_EXPECT=0"
-D "TEST_REFERENCE=testout.sav"
-P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake"
)
-set_tests_properties (cdftest PROPERTIES DEPENDS hdftest LABELS ${PROJECT_NAME})
+set_tests_properties (MFHDF_TEST-cdftest PROPERTIES
+ DEPENDS MFHDF_TEST-hdftest
+ WORKING_DIRECTORY ${PROJECT_BINARY_DIR}/TEST
+ LABELS ${PROJECT_NAME}
+)
-add_test (NAME hdfnctest COMMAND $<TARGET_FILE:hdfnctest>)
+add_test (NAME MFHDF_TEST-hdfnctest COMMAND $<TARGET_FILE:hdfnctest>)
set (NCpassRegex "HDF-nc test passes")
-set_property (TEST hdfnctest PROPERTY PASS_REGULAR_EXPRESSION "${NCpassRegex}")
-set_tests_properties (hdfnctest PROPERTIES DEPENDS cdftest LABELS ${PROJECT_NAME})
+set_tests_properties (MFHDF_TEST-hdfnctest PROPERTIES
+ PASS_REGULAR_EXPRESSION "${NCpassRegex}"
+ DEPENDS MFHDF_TEST-cdftest
+ LABELS ${PROJECT_NAME}
+)
+if (BUILD_SHARED_LIBS)
+ add_test (
+ NAME MFHDF_TEST-shared-clearall-objects
+ COMMAND ${CMAKE_COMMAND}
+ -E remove
+ ${HDF4_TESTMFHDF_FILES}
+ WORKING_DIRECTORY
+ ${PROJECT_BINARY_DIR}/TEST-shared
+ )
+
+ add_test (NAME MFHDF_TEST-hdftest-shared COMMAND $<TARGET_FILE:hdftest-shared>)
+ set (passRegex "HDF-SD test passes")
+ set_tests_properties (MFHDF_TEST-hdftest-shared PROPERTIES
+ PASS_REGULAR_EXPRESSION "${passRegex}"
+ DEPENDS MFHDF_TEST-shared-clearall-objects
+ WORKING_DIRECTORY ${PROJECT_BINARY_DIR}/TEST-shared
+ LABELS ${PROJECT_NAME}
+ )
+
+ add_test (NAME MFHDF_TEST-cdftest-shared COMMAND "${CMAKE_COMMAND}"
+ -D "TEST_PROGRAM=$<TARGET_FILE:cdftest-shared>"
+ -D "TEST_ARGS:STRING="
+ -D "TEST_FOLDER=${PROJECT_BINARY_DIR}/TEST-shared"
+ -D "TEST_OUTPUT=cdfout.new"
+ -D "TEST_EXPECT=0"
+ -D "TEST_REFERENCE=testout.sav"
+ -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake"
+ )
+ set_tests_properties (MFHDF_TEST-cdftest-shared PROPERTIES
+ DEPENDS MFHDF_TEST-hdftest-shared
+ WORKING_DIRECTORY ${PROJECT_BINARY_DIR}/TEST-shared
+ LABELS ${PROJECT_NAME}
+ )
+
+ add_test (NAME MFHDF_TEST-hdfnctest-shared COMMAND $<TARGET_FILE:hdfnctest-shared>)
+ set (NCpassRegex "HDF-nc test passes")
+ set_tests_properties (MFHDF_TEST-hdfnctest-shared PROPERTIES
+ PASS_REGULAR_EXPRESSION "${NCpassRegex}"
+ DEPENDS MFHDF_TEST-cdftest-shared
+ WORKING_DIRECTORY ${PROJECT_BINARY_DIR}/TEST-shared
+ LABELS ${PROJECT_NAME}
+ )
+endif (BUILD_SHARED_LIBS)
#-- Adding test for xdrtest
if (HDF4_BUILD_XDR_LIB)
add_executable (xdrtest ${HDF4_MFHDF_XDR_DIR}/xdrtest.c)
- TARGET_NAMING (xdrtest ${LIB_TYPE})
- TARGET_C_PROPERTIES (xdrtest " " " ")
+ TARGET_NAMING (xdrtest STATIC)
+ TARGET_C_PROPERTIES (xdrtest STATIC " " " ")
target_link_libraries (xdrtest ${HDF4_MF_LIB_TARGET} ${HDF4_SRC_LIB_TARGET} ${LINK_LIBS} ${HDF4_MF_XDR_LIB_TARGET})
- if (CYGWIN)
+ if (MSVC_VERSION LESS 1900)
add_custom_command (
- TARGET xdrtest
+ TARGET xdrtest
POST_BUILD
COMMAND ${CMAKE_COMMAND}
- ARGS -E copy_if_different ${HDF4_MFHDF_XDR_DIR}/xdrtest.cyg ${PROJECT_BINARY_DIR}/xdrtest.out
+ ARGS -E copy_if_different ${HDF4_MFHDF_XDR_DIR}/xdrtest.out ${PROJECT_BINARY_DIR}/TEST/xdrtest.out
)
- else (CYGWIN)
+ else (MSVC_VERSION LESS 1900)
add_custom_command (
- TARGET xdrtest
+ TARGET xdrtest
POST_BUILD
COMMAND ${CMAKE_COMMAND}
- ARGS -E copy_if_different ${HDF4_MFHDF_XDR_DIR}/xdrtest.out ${PROJECT_BINARY_DIR}/xdrtest.out
+ ARGS -E copy_if_different ${HDF4_MFHDF_XDR_DIR}/xdrtest.cyg ${PROJECT_BINARY_DIR}/TEST/xdrtest.out
)
- endif (CYGWIN)
+ endif (MSVC_VERSION LESS 1900)
if (HDF4_ENABLE_USING_MEMCHECKER)
- add_test (NAME xdrtest COMMAND $<TARGET_FILE:xdrtest>)
+ add_test (NAME MFHDF_TEST-xdrtest COMMAND $<TARGET_FILE:xdrtest>)
else (HDF4_ENABLE_USING_MEMCHECKER)
add_test (
- NAME xdrtest
+ NAME MFHDF_TEST-xdrtest
COMMAND "${CMAKE_COMMAND}"
-D "TEST_PROGRAM=$<TARGET_FILE:xdrtest>"
-D "TEST_ARGS:STRING="
- -D "TEST_FOLDER=${PROJECT_BINARY_DIR}"
+ -D "TEST_FOLDER=${PROJECT_BINARY_DIR}/TEST"
-D "TEST_OUTPUT=xdrtest.tst"
-D "TEST_EXPECT=0"
-D "TEST_REFERENCE=xdrtest.out"
-P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake"
)
endif (HDF4_ENABLE_USING_MEMCHECKER)
- set_tests_properties (xdrtest PROPERTIES DEPENDS hdfnctest LABELS ${PROJECT_NAME})
+ set_tests_properties (MFHDF_TEST-xdrtest PROPERTIES DEPENDS hdfnctest LABELS ${PROJECT_NAME})
endif (HDF4_BUILD_XDR_LIB)
diff --git a/mfhdf/test/Makefile.am b/mfhdf/test/Makefile.am
index fa92771..2298918 100644
--- a/mfhdf/test/Makefile.am
+++ b/mfhdf/test/Makefile.am
@@ -28,7 +28,7 @@ hdfnctest_SOURCES = hdfnctest.c tutils.c tncvargetfill.c tunlim.c \
hdfnctest_LDADD = $(LIBMFHDF) $(LIBHDF) @LIBS@ $(XDRLIB)
hdftest_SOURCES = hdftest.c tutils.c tchunk.c tcomp.c tcoordvar.c \
- tdim.c temptySDSs.c tattributes.c tfile.c \
+ tdim.c temptySDSs.c tattributes.c texternal.c tfile.c \
tmixed_apis.c tnetcdf.c trank0.c tsd.c tsdsprops.c \
tszip.c tattdatainfo.c tdatainfo.c tdatasizes.c
hdftest_LDADD = $(LIBMFHDF) $(LIBHDF) @LIBS@ $(XDRLIB)
diff --git a/mfhdf/test/Makefile.in b/mfhdf/test/Makefile.in
index 8007c1d..94d19f2 100644
--- a/mfhdf/test/Makefile.in
+++ b/mfhdf/test/Makefile.in
@@ -89,7 +89,20 @@ check_PROGRAMS = cdftest$(EXEEXT) hdfnctest$(EXEEXT) hdftest$(EXEEXT)
TESTS = $(am__EXEEXT_1) $(TEST_SCRIPT)
subdir = mfhdf/test
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
-am__aclocal_m4_deps = $(top_srcdir)/configure.ac
+am__aclocal_m4_deps = $(top_srcdir)/m4/ax_check_class.m4 \
+ $(top_srcdir)/m4/ax_check_classpath.m4 \
+ $(top_srcdir)/m4/ax_check_java_home.m4 \
+ $(top_srcdir)/m4/ax_check_junit.m4 \
+ $(top_srcdir)/m4/ax_java_options.m4 \
+ $(top_srcdir)/m4/ax_jni_include_dir.m4 \
+ $(top_srcdir)/m4/ax_prog_jar.m4 \
+ $(top_srcdir)/m4/ax_prog_java.m4 \
+ $(top_srcdir)/m4/ax_prog_java_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javac.m4 \
+ $(top_srcdir)/m4/ax_prog_javac_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javadoc.m4 \
+ $(top_srcdir)/m4/ax_try_compile_java.m4 \
+ $(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
mkinstalldirs = $(SHELL) $(top_srcdir)/bin/mkinstalldirs
@@ -110,10 +123,10 @@ hdfnctest_DEPENDENCIES = $(LIBMFHDF) $(LIBHDF)
am_hdftest_OBJECTS = hdftest.$(OBJEXT) tutils.$(OBJEXT) \
tchunk.$(OBJEXT) tcomp.$(OBJEXT) tcoordvar.$(OBJEXT) \
tdim.$(OBJEXT) temptySDSs.$(OBJEXT) tattributes.$(OBJEXT) \
- tfile.$(OBJEXT) tmixed_apis.$(OBJEXT) tnetcdf.$(OBJEXT) \
- trank0.$(OBJEXT) tsd.$(OBJEXT) tsdsprops.$(OBJEXT) \
- tszip.$(OBJEXT) tattdatainfo.$(OBJEXT) tdatainfo.$(OBJEXT) \
- tdatasizes.$(OBJEXT)
+ texternal.$(OBJEXT) tfile.$(OBJEXT) tmixed_apis.$(OBJEXT) \
+ tnetcdf.$(OBJEXT) trank0.$(OBJEXT) tsd.$(OBJEXT) \
+ tsdsprops.$(OBJEXT) tszip.$(OBJEXT) tattdatainfo.$(OBJEXT) \
+ tdatainfo.$(OBJEXT) tdatasizes.$(OBJEXT)
hdftest_OBJECTS = $(am_hdftest_OBJECTS)
hdftest_DEPENDENCIES = $(LIBMFHDF) $(LIBHDF)
AM_V_P = $(am__v_P_@AM_V@)
@@ -423,12 +436,27 @@ FFLAGS = @FFLAGS@
FGREP = @FGREP@
FLIBS = @FLIBS@
GREP = @GREP@
+H4_CLASSPATH = @H4_CLASSPATH@
+H4_JAVACFLAGS = @H4_JAVACFLAGS@
+H4_JAVAFLAGS = @H4_JAVAFLAGS@
H4_VERSION = @H4_VERSION@
+HDF_JAVA = @HDF_JAVA@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
INSTALL_PROGRAM = @INSTALL_PROGRAM@
INSTALL_SCRIPT = @INSTALL_SCRIPT@
INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+JAR = @JAR@
+JAVA = @JAVA@
+JAVAC = @JAVAC@
+JAVACFLAGS = @JAVACFLAGS@
+JAVADOC = @JAVADOC@
+JAVAFLAGS = @JAVAFLAGS@
+JAVAPREFIX = @JAVAPREFIX@
+JAVA_JUNIT = @JAVA_JUNIT@
+JAVA_PATH_NAME = @JAVA_PATH_NAME@
+JNIFLAGS = @JNIFLAGS@
+JUNIT = @JUNIT@
LD = @LD@
LDFLAGS = @LDFLAGS@
LEX = @LEX@
@@ -471,11 +499,14 @@ STRIP = @STRIP@
SZIP_HAS_ENCODER = @SZIP_HAS_ENCODER@
SZIP_INFO = @SZIP_INFO@
TBL = @TBL@
+TESTS_JUNIT = @TESTS_JUNIT@
TEST_FORTRAN_NETCDF = @TEST_FORTRAN_NETCDF@
UNAME_INFO = @UNAME_INFO@
USE_COMP_SZIP = @USE_COMP_SZIP@
+UUDECODE = @UUDECODE@
VERSION = @VERSION@
YACC = @YACC@
+_ACJNI_JAVAC = @_ACJNI_JAVAC@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
@@ -573,7 +604,7 @@ hdfnctest_SOURCES = hdfnctest.c tutils.c tncvargetfill.c tunlim.c \
hdfnctest_LDADD = $(LIBMFHDF) $(LIBHDF) @LIBS@ $(XDRLIB)
hdftest_SOURCES = hdftest.c tutils.c tchunk.c tcomp.c tcoordvar.c \
- tdim.c temptySDSs.c tattributes.c tfile.c \
+ tdim.c temptySDSs.c tattributes.c texternal.c tfile.c \
tmixed_apis.c tnetcdf.c trank0.c tsd.c tsdsprops.c \
tszip.c tattdatainfo.c tdatainfo.c tdatasizes.c
@@ -674,6 +705,7 @@ distclean-compile:
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/tdatasizes.Po@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/tdim.Po@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/temptySDSs.Po@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/texternal.Po@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/tfile.Po@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/tmixed_apis.Po@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/tncunlim.Po@am__quote@
diff --git a/mfhdf/test/Roy-64.nc b/mfhdf/test/Roy-64.nc
new file mode 100644
index 0000000..460d3b6
Binary files /dev/null and b/mfhdf/test/Roy-64.nc differ
diff --git a/mfhdf/test/Roy.nc b/mfhdf/test/Roy.nc
new file mode 100644
index 0000000..6849aef
Binary files /dev/null and b/mfhdf/test/Roy.nc differ
diff --git a/mfhdf/test/hdfnctest.c b/mfhdf/test/hdfnctest.c
index c9e07f5..e2e64fb 100644
--- a/mfhdf/test/hdfnctest.c
+++ b/mfhdf/test/hdfnctest.c
@@ -10,10 +10,6 @@
* access to either file, you may request a copy from help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 5109 $";
-#endif
-
#include "mfhdf.h"
#include "hdftest.h"
diff --git a/mfhdf/test/hdftest.c b/mfhdf/test/hdftest.c
index c6f5ec7..be7a794 100644
--- a/mfhdf/test/hdftest.c
+++ b/mfhdf/test/hdftest.c
@@ -11,10 +11,6 @@
* access to either file, you may request a copy from help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 5218 $";
-#endif
-
/* $Id: hdftest.c 5218 2009-08-29 04:21:49Z bmribler $ */
#include "mfhdf.h"
@@ -67,6 +63,7 @@ extern int test_dimension();
extern int test_attributes();
extern int test_datasizes();
extern int test_datainfo();
+extern int test_external();
extern int test_att_ann_datainfo();
int
@@ -1256,227 +1253,10 @@ main(int argc, char *argv[])
status = SDend(f1);
CHECK(status, FAIL, "SDend");
- /*
- * Test the External File storage stuff
- */
-
- /* Create file 'exttst.hdf' */
- fext = SDstart(EXTTST, DFACC_CREATE);
- CHECK(fext, FAIL, "SDstart");
-
- /* Create data set 'ExteneralDataSet' in file 'exttst.hdf' */
- nt = DFNT_INT32 | DFNT_NATIVE;
- dimsize[0] = 5;
- dimsize[1] = 5;
- newsds = SDcreate(fext, "ExternalDataSet", nt, 2, dimsize);
- CHECK(newsds, FAIL, "SDcreate: Failed to create a new data set 'ExternalDataSet' for external promotion");
-
- /* initialize data to write out */
- for(i = 0; i < 25; i++)
- idata[i] = i;
-
- /* Write data to all of data set 'ExternalDataSet' in file 'exttst.hdf' */
- start[0] = start[1] = 0;
- end[0] = end[1] = 5;
- status = SDwritedata(newsds, start, NULL, end, (VOIDP) idata);
- CHECK(status, FAIL, "SDwritedata");
-
- /* Now promote data set 'ExternalDataSet' to an external data set
- in the file 'extfile.hdf' */
- status = SDsetexternalfile(newsds, EXTFILE, 0);
- CHECK(status, FAIL, "SDsetexternalfile");
-
- for(i = 0; i < 10; i++)
- idata[i] = i * 10;
-
- /* Now write data to part of newly promoted data set 'ExternalDataSet'
- which is now an external data set */
- start[0] = start[1] = 0;
- end[0] = 2;
- end[1] = 5;
- status = SDwritedata(newsds, start, NULL, end, (VOIDP) idata);
- CHECK(status, FAIL, "SDwritedata");
-
- /* end access to data set 'ExternalDataSet' */
- status = SDendaccess(newsds);
- CHECK(status, FAIL, "SDendaccess");
-
- /* need to close to flush external info to file 'exttst.hdf' */
- status = SDend(fext);
- CHECK(status, FAIL, "SDend");
-
- /* Open file 'exttst.hdf' again */
- fext = SDstart(EXTTST, DFACC_RDWR);
- CHECK(fext, FAIL, "SDstart (again)");
-
- /* Create a "wrapper" data set in file 'exttst.hdf'. i.e. a data set
- that will point to data in an already existing external file */
- dimsize[0] = 3;
- dimsize[1] = 3;
- newsds2 = SDcreate(fext, "WrapperDataSet", nt, 2, dimsize);
- CHECK(newsds2, FAIL, "SDcreate:Failed to create a new data set('WrapperDataSet') for external wrapping");
-
- /* Promote the regular data set to a "wrapper" one by making
- it point to where the real data is in the external file 'extfile.hdf'.
- Note that only a subset of the real data('ExternalDataSet') is pointed to
- by the "wrapper" data set. */
- offset = DFKNTsize(nt) * 2;
- status = SDsetexternalfile(newsds2, EXTFILE, offset);
- CHECK(status, FAIL, "SDsetexternalfile");
-
- /* now read data back from this "wrapper" data set */
- start[0] = start[1] = 0;
- end[0] = end[1] = 3;
- status = SDreaddata(newsds2, start, NULL, end, (VOIDP) idata);
- CHECK(status, FAIL, "SDreaddata");
-
- /* verify data read back in */
- for(i = 0; i < 8; i++)
- {
- if(idata[i] != (i + 2) * 10)
- {
- fprintf(stderr, "Bogus val in loc %d in wrapper dset want %d got %ld\n",
- i, (i + 2) * 10, (long)idata[i]);
- num_errs++;
- }
- }
-
- if(idata[8] != 10)
- {
- fprintf(stderr, "Bogus val in last loc in wrapper dset want 10 got %ld\n",
- (long)idata[8]);
- num_errs++;
- }
-
- /* End access to data set "WrapperDataSet" */
- status = SDendaccess(newsds2);
- CHECK(status, FAIL, "SDendaccess");
-
- /* Create data set 'NoExteneralDataSet' in file 'exttst.hdf' */
- nt = DFNT_INT32 | DFNT_NATIVE;
- dimsize[0] = 5;
- dimsize[1] = 5;
- noextsds = SDcreate(fext, "NoExternalDataSet", nt, 2, dimsize);
- CHECK(noextsds, FAIL, "SDcreate: Failed to create a new data set 'NoExternalDataSet' for testing SDSgetexternalfile on a non-external element");
-
- /* initialize data to write out */
- for(i = 0; i < 25; i++)
- idata[i] = i;
-
- /* Write data to all of data set 'NoExternalDataSet' in file 'exttst.hdf' */
- start[0] = start[1] = 0;
- end[0] = end[1] = 5;
- status = SDwritedata(noextsds, start, NULL, end, (VOIDP) idata);
- CHECK(status, FAIL, "SDwritedata");
-
- status = SDendaccess(noextsds);
- CHECK(status, FAIL, "SDendaccess");
-
- /* Close file 'exttst.hdf' */
- status = SDend(fext);
- CHECK(status, FAIL, "SDend");
-
- /* Test getting external file info on data set "ExternalDataSet" and test
- not able to get external file info on data set "NoExternalDataSet" */
- {
- intn name_len=0;
- char *extfile_name;
- int32 offset=0, length=0;
- int32 sds_id, sds_index;
-
- /* Open file 'exttst.hdf' again */
- fext = SDstart(EXTTST, DFACC_RDWR);
- CHECK(fext, FAIL, "SDstart (again)");
-
- /* Get index of "ExternalDataSet" and get access to it */
- sds_index = SDnametoindex(fext, "ExternalDataSet");
- CHECK(sds_index, FAIL, "SDnametoindex");
- sds_id = SDselect(fext, sds_index);
- CHECK(sds_id, FAIL, "SDselect");
-
- /* Call SDgetexternalfile the first time passing in 0 for external
- file name length to get the actual length - SDgetexternalfile is
- deprecated as of 4.2.7 */
- name_len = SDgetexternalfile(sds_id, 0, NULL, NULL);
- VERIFY(name_len, (intn)HDstrlen(EXTFILE), "SDgetexternalfile");
-
- extfile_name = (char *) HDmalloc(sizeof(char *) * (name_len+1));
- CHECK_ALLOC(extfile_name, "extfile_name", "SDgetexternalfile");
- HDmemset(extfile_name, '\0', name_len+1);
-
- /* Call SDgetexternalfile again and get the external file info */
- name_len = SDgetexternalfile(sds_id, name_len+1, extfile_name, &offset);
- VERIFY(name_len, (intn)HDstrlen(EXTFILE), "SDgetexternalfile");
- VERIFY_CHAR(EXTFILE, extfile_name, "SDgetexternalfile");
-
- /* Call SDgetexternalinfo the first time passing in 0 for external
- file name length to get the actual length */
- name_len = SDgetexternalinfo(sds_id, 0, NULL, NULL, NULL);
- VERIFY(name_len, (intn)HDstrlen(EXTFILE), "SDgetexternalinfo");
-
- /* Test passing in NULL pointer for external file name buffer, should
- fail gracefully */
- {
- char *null_buffer=NULL;
- intn ret_code=0;
- ret_code = SDgetexternalinfo(sds_id, name_len+1, null_buffer, &offset, &length);
- VERIFY(ret_code, FAIL, "SDgetexternalinfo");
- }
-
- extfile_name = (char *) HDmalloc(sizeof(char *) * (name_len+1));
- CHECK_ALLOC(extfile_name, "extfile_name", "SDgetexternalinfo");
- HDmemset(extfile_name, '\0', name_len+1);
-
- /* Call SDgetexternalinfo again and get the external file info */
- name_len = SDgetexternalinfo(sds_id, name_len+1, extfile_name, &offset, &length);
- VERIFY(name_len, (intn)HDstrlen(EXTFILE), "SDgetexternalinfo");
- VERIFY_CHAR(EXTFILE, extfile_name, "SDgetexternalinfo");
-
- /* Test passing in smaller buffer for external file name than actual;
- name should be truncated */
- {
- char *short_name = (char *) HDmalloc(sizeof(char *) * (name_len));
- HDmemset(short_name, '\0', name_len);
- HDstrncpy(short_name, EXTFILE, name_len-2);
- HDmemset(extfile_name, '\0', name_len);
-
- /* Call SDgetexternalinfo again with smaller buffer size and verify
- that SDgetexternalinfo reads the name truncated to the given
- buffer size*/
- name_len = SDgetexternalinfo(sds_id, name_len-2, extfile_name, &offset, &length);
- VERIFY(name_len, (intn)HDstrlen(extfile_name), "SDgetexternalinfo");
- VERIFY_CHAR(short_name, extfile_name, "SDgetexternalinfo");
- HDfree(short_name);
- }
-
- status = SDendaccess(sds_id);
- CHECK(status, FAIL, "SDendaccess");
-
- /* Get index of "NoExternalDataSet" and get access to it */
- sds_index = SDnametoindex(fext, "NoExternalDataSet");
- CHECK(sds_index, FAIL, "SDnametoindex");
- sds_id = SDselect(fext, sds_index);
- CHECK(sds_id, FAIL, "SDselect");
-
- /* Call SDgetexternalfile on the SDS that doesn't have external
- element, should fail - SDgetexternalfile is deprecated as of
- 4.2.7 */
- name_len = SDgetexternalfile(sds_id, 0, NULL, NULL);
- VERIFY(name_len, FAIL, "SDgetexternalfile");
-
- /* Call SDgetexternalinfo on the SDS that doesn't have external
- element, should return 0 for length of external file name */
- name_len = SDgetexternalinfo(sds_id, 0, NULL, NULL, NULL);
- VERIFY(name_len, 0, "SDgetexternalinfo");
-
- status = SDendaccess(sds_id);
- CHECK(status, FAIL, "SDendaccess");
-
- /* Close file 'exttst.hdf' */
- status = SDend(fext);
- CHECK(status, FAIL, "SDend");
- }
-
+/****************************************************************
+ * Moved testing of external storage stuff into texternal.c *
+ * -BMR, Nov 16, 2015 *
+ ****************************************************************/
#ifdef NBIT_TEST
@@ -1638,6 +1418,11 @@ main(int argc, char *argv[])
status = test_coordvar();
num_errs = num_errs + status;
+ /* BMR: Added a test routine dedicated to testing functionality
+ related to external data (in texternal.c) - 10/29/15 */
+ status = test_external();
+ num_errs = num_errs + status;
+
/* BMR: Verifies that some functions will not fail even though SZIP
library is not present or only decoder is available. */
status = test_szip_compression(); /* in tszip.c */
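The external-file query code removed above (and reintroduced in texternal.c
below) always follows the same two-step calling convention for
SDgetexternalinfo: ask for the name length first, then allocate a buffer and
call again. A compact illustration of that convention follows; get_ext_name
is a hypothetical helper written for this note, not part of the library or
of this patch, and is only a sketch of how a caller might wrap the calls
exercised in the tests.

    /* Hypothetical helper, not part of the patch: returns a freshly
       allocated copy of an SDS's external file name, or NULL if the SDS
       has no external element or an error occurs.  offset/length receive
       the location of the external data. */
    static char *get_ext_name(int32 sds_id, int32 *offset, int32 *length)
    {
        intn  name_len;
        char *name;

        /* First call: size 0 and NULL buffer only query the name length. */
        name_len = SDgetexternalinfo(sds_id, 0, NULL, NULL, NULL);
        if (name_len <= 0)
            return NULL;    /* 0 means no external element, <0 means error */

        name = (char *)HDmalloc(name_len + 1);
        if (name == NULL)
            return NULL;
        HDmemset(name, '\0', name_len + 1);

        /* Second call: fill the name buffer and the offset/length values. */
        if (SDgetexternalinfo(sds_id, name_len + 1, name, offset, length) <= 0)
        {
            HDfree(name);
            return NULL;
        }
        return name;
    }

A caller would pass an id obtained from SDselect and release the returned
string with HDfree.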
diff --git a/mfhdf/test/hdftest.h b/mfhdf/test/hdftest.h
index 5a7fc02..6a04a86 100644
--- a/mfhdf/test/hdftest.h
+++ b/mfhdf/test/hdftest.h
@@ -49,3 +49,32 @@
func_name, buf_name ); \
exit(1); } \
}
+
+/*************************** Utility Functions ***************************/
+
+/* Calls SDcreate, SDwritedata, and SDendaccess */
+int32 make_SDS(int32 sd_id, char* sds_name, int32 type, int32 rank,
+ int32* dim_sizes, int32 unlim_dim, VOIDP written_data);
+
+/* Calls SDcreate, SDsetcompress, SDwritedata, and SDendaccess */
+int32 make_CompSDS(int32 sd_id, char* sds_name, int32 type, int32 rank,
+ int32* dim_sizes, VOIDP written_data);
+
+/* Calls SDcreate, SDsetexternalfile, SDwritedata, and SDendaccess */
+int32 make_Ext3D_SDS(int32 sd_id, char* sds_name, int32 type, int32 rank,
+ int32* dim_sizes, VOIDP written_data,
+ int32 offset, char* ext_file_name);
+
+/* Calls SDnametoindex and SDselect */
+int32 get_SDSbyName(int32 sd_id, char* sds_name);
+
+/* Calls get_SDSbyName, SDwritedata, and SDendaccess */
+int32 append_Data2SDS(int32 sd_id, char* sds_name, int32* start, int32* edges, void* ap_data);
+
+/* Calls SDgetdatasize then verify the size against data_size */
+void verify_datasize(int32 sds_id, int32 data_size, char* sds_name);
+
+/* Find and open an SDS by name */
+int32 get_SDSbyName(int32 sd_id, char* sds_name);
+
+
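The prototypes above describe the new test helpers only by the SD calls
they wrap; their definitions are part of the test sources added by this
commit and are not shown in this hunk. As a rough sketch of the intended
shape (an assumption based purely on the comment for make_SDS, not on the
actual implementation, which may differ, for instance in its return value):

    /* Sketch only: create an SDS, write the full extent, close it.
       rank is assumed to be at most 3 here; the real helper need not
       have that limit and may return the written data size instead. */
    static int32 sketch_make_SDS(int32 sd_id, char *sds_name, int32 type,
                                 int32 rank, int32 *dim_sizes,
                                 int32 unlim_dim, VOIDP written_data)
    {
        int32 sds_id, start[3] = {0, 0, 0}, edges[3];
        int   i;

        sds_id = SDcreate(sd_id, sds_name, type, rank, dim_sizes);
        if (sds_id == FAIL)
            return FAIL;

        /* With an unlimited first dimension (SD_UNLIMITED in dim_sizes[0]),
           unlim_dim gives the number of records actually written. */
        for (i = 0; i < rank; i++)
            edges[i] = dim_sizes[i];
        if (dim_sizes[0] == SD_UNLIMITED)
            edges[0] = unlim_dim;

        if (SDwritedata(sds_id, start, NULL, edges, written_data) == FAIL)
        {
            SDendaccess(sds_id);
            return FAIL;
        }
        return SDendaccess(sds_id);
    }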
diff --git a/mfhdf/test/tcoordvar.c b/mfhdf/test/tcoordvar.c
index 89f76ad..8c76691 100644
--- a/mfhdf/test/tcoordvar.c
+++ b/mfhdf/test/tcoordvar.c
@@ -234,7 +234,7 @@ static intn test_dim1_SDS1(void)
for (idx1 = 0; idx1 < dimsize[0]; idx1++)
if (out_data[idx1] != sds1_data[idx1])
{
- fprintf(stderr, "Read value (%f) differs from written (%f) at [%d]\n", out_data[idx1], idx1);
+ fprintf(stderr, "Read value (%f) differs from written (%f) at [%d]\n", out_data[idx1], sds1_data[idx1], idx1);
num_errs++;
}
diff --git a/mfhdf/test/texternal.c b/mfhdf/test/texternal.c
new file mode 100644
index 0000000..25303b6
--- /dev/null
+++ b/mfhdf/test/texternal.c
@@ -0,0 +1,814 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF. The full HDF copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at *
+ * http://hdfgroup.org/products/hdf4/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include "mfhdf.h"
+
+#ifdef HDF
+
+#include "hdftest.h"
+
+#define EXTTST "exttst.hdf" /* main file for external file test */
+#define EXTFILE "SD_external_file" /* file to contain external data */
+#define EXTFILE1 "SD_external_file 2" /* file to contain external data */
+#define EXTSDS "ExternalDataSet" /* data set written with external data
+ right after creation */
+#define EXTSDS2 "ExternalDataSet 2" /* data set first empty then written
+ with external data */
+#define WRAPSDS "WrapperDataSet" /* data set pointing to external data */
+#define NOEXTSDS "NoExternalDataSet" /* data set with data in main file */
+#define EXTFILE2 "ExternalSDSexisting" /* external file of a data set that already has data */
+#define EXTFILE3 "ShouldNotHappen" /* external file that must not be created; the SDS is already external */
+#define OFFSET 24
+#define NUM_SDS 4
+#define SDS1 "Dataset 1"
+#define SDS2 "Dataset 2"
+#define SDS3 "Dataset 3"
+#define SDS4 "Dataset 4"
+#define RANK3 3
+#define X_LENGTH 4
+#define Y_LENGTH 5
+#define Z_LENGTH 6
+#define RANK2 2
+#define DIM1 5
+#define DIM2 5
+
+void verify_data(int32 sd_id, int32 sds_ind);
+
+/* Same set of data for every 3-dim data set. Initialized in test_external(). */
+int32 written_data[Z_LENGTH][Y_LENGTH][X_LENGTH];
+
+/* Appended data or hyperslab */
+int32 ap_data[1][Y_LENGTH][X_LENGTH];
+
+/********************************************************************
+ Name: test_setexternal() - tests basic functionality for storing
+ data in an external file
+
+ Description:
+ This function tests three scenarios:
+ - Data written in main file then moved to external file and modified
+ - Data in external file is pointed to by a "wrapper" data set
+ - Empty data set is written with data in the external file
+
+ The main contents include:
+ - Data written in main file then moved and modified
+ + create and write the entire data set in the main file
+ + move the data to the external file with SDsetexternalfile
+ + modify this external data
+ - Data in external file pointed to by a "wrapper" data set
+ + create a data set in the main file, i.e., the wrapper data set
+ + have the wrapper point to part of the external data that
+ belongs to the external data set above
+ + read the wrapper's data and verify
+ - Empty data set is written with data in the external file
+ + create a data set in the main file and close it
+ + re-open the data set
+ + promote it to external data set, i.e., SDsetexternalfile
+ + write data to the data set
+
+ Return value:
+ The number of errors that occurred in this routine.
+
+ BMR - Jan 16, 2009
+*********************************************************************/
+static int test_setexternal()
+{
+ int32 sd_id, sds_id;
+ int32 start[2], edges[2], dimsizes[2], nt, offset;
+ int32 idata[DIM1 * DIM2];
+ int ii;
+ intn status;
+ intn num_errs = 0; /* number of errors so far */
+
+ /* Create an HDF file */
+ sd_id = SDstart(EXTTST, DFACC_CREATE);
+ CHECK(sd_id, FAIL, "SDstart");
+
+ /* Create a data set in the HDF file */
+ nt = DFNT_INT32 | DFNT_NATIVE;
+ dimsizes[0] = DIM1;
+ dimsizes[1] = DIM2;
+ sds_id = SDcreate(sd_id, EXTSDS, nt, RANK2, dimsizes);
+ CHECK(sds_id, FAIL, "SDcreate: Failed to create a new data set 'ExternalDataSet' for external promotion");
+
+ /* Initialize data to write out */
+ for(ii = 0; ii < dimsizes[0] * dimsizes[1]; ii++)
+ idata[ii] = ii;
+
+ /* Write data to the entire data set */
+ start[0] = start[1] = 0;
+ edges[0] = dimsizes[0];
+ edges[1] = dimsizes[1];
+
+ status = SDwritedata(sds_id, start, NULL, edges, (VOIDP) idata);
+ CHECK(status, FAIL, "SDwritedata");
+
+ /* Promote the data set to an external data set by storing its data in
+ the external file EXTFILE */
+ status = SDsetexternalfile(sds_id, EXTFILE, 0);
+ CHECK(status, FAIL, "SDsetexternalfile");
+
+ for(ii = 0; ii < 3*dimsizes[1]; ii++)
+ idata[ii] = ii * 10;
+
+ /* Write data to part of the newly promoted data set which now contains
+ data in the external file */
+ start[0] = start[1] = 0;
+ edges[0] = 3;
+ edges[1] = dimsizes[1];
+ status = SDwritedata(sds_id, start, NULL, edges, (VOIDP) idata);
+ CHECK(status, FAIL, "SDwritedata");
+
+ /* End access to the data set */
+ status = SDendaccess(sds_id);
+ CHECK(status, FAIL, "SDendaccess");
+
+ /* Need to close to flush external info to the HDF file */
+ status = SDend(sd_id);
+ CHECK(status, FAIL, "SDend");
+
+ /* Open the HDF file again */
+ sd_id = SDstart(EXTTST, DFACC_RDWR);
+ CHECK(sd_id, FAIL, "SDstart (again)");
+
+ /* Create a data set in the HDF file */
+ dimsizes[0] = 3;
+ dimsizes[1] = 3;
+ sds_id = SDcreate(sd_id, WRAPSDS, nt, 2, dimsizes);
+ CHECK(sds_id, FAIL, "SDcreate:Failed to create a new data set('WrapperDataSet') for external wrapping");
+
+ /* Promote the regular data set to a "wrapper" one by making it point to
+ the actual data in the external file 'extfile.hdf'.
+ Note that only a subset of the existing data (which belongs to the
+ previous data set, EXTSDS) is pointed to by the "wrapper"
+ data set. The subset is specified by dimsizes array */
+ offset = DFKNTsize(nt) * 2;
+ status = SDsetexternalfile(sds_id, EXTFILE, offset);
+ CHECK(status, FAIL, "SDsetexternalfile");
+
+ /* End access to the data set */
+ status = SDendaccess(sds_id);
+ CHECK(status, FAIL, "SDendaccess");
+
+ /* Need to close to flush external info to the HDF, or main, file */
+ status = SDend(sd_id);
+ CHECK(status, FAIL, "SDend");
+
+ /* Open the HDF file again */
+ sd_id = SDstart(EXTTST, DFACC_RDWR);
+ CHECK(sd_id, FAIL, "SDstart (again)");
+
+ sds_id = get_SDSbyName(sd_id, WRAPSDS);
+
+ /* Read and verify data via the "wrapper" data set */
+ {
+ int32 odata[9];
+
+ /* Read data back from this "wrapper" data set */
+ start[0] = start[1] = 0;
+ edges[0] = 3;
+ edges[1] = 3;
+ status = SDreaddata(sds_id, start, NULL, edges, (VOIDP) odata);
+ CHECK(status, FAIL, "SDreaddata");
+
+ /* Verify data read back in */
+ for(ii = 0; ii < edges[0]*edges[1]; ii++)
+ {
+ if(odata[ii] != (ii + 2) * 10)
+ {
+ fprintf(stderr, "Bogus val in loc %d in wrapper dset want %d got %ld\n",
+ ii, (ii + 2) * 10, (long)odata[ii]);
+ num_errs++;
+ }
+ }
+ }
+
+ /* End access to the wrapper data set */
+ status = SDendaccess(sds_id);
+ CHECK(status, FAIL, "SDendaccess");
+
+ /* Create an empty data set then write external data to it */
+
+ /* Create data set EXTSDS2 */
+ nt = DFNT_INT32 | DFNT_NATIVE;
+ dimsizes[0] = X_LENGTH;
+ dimsizes[1] = Y_LENGTH;
+ sds_id = SDcreate(sd_id, EXTSDS2, nt, 2, dimsizes);
+ CHECK(sds_id, FAIL, "SDcreate: Failed to create a new data set for testing writing external data to an empty data set");
+
+ /* Close data sets */
+ status = SDendaccess(sds_id);
+
+ /* Re-open the named data set, id is checked by callee */
+ sds_id = get_SDSbyName(sd_id, EXTSDS2);
+
+ /* Make this data set store its data in a new external file */
+ status = SDsetexternalfile(sds_id, EXTFILE1, 0);
+
+ /* initialize data to write out */
+ for(ii = 0; ii < dimsizes[0]*dimsizes[1]; ii++)
+ idata[ii] = ii;
+
+ /* Write data to all of data set EXTSDS2 in the file EXTFILE1 */
+ start[0] = start[1] = 0;
+ edges[0] = dimsizes[0];
+ edges[1] = dimsizes[1];
+ status = SDwritedata(sds_id, start, NULL, edges, (VOIDP) idata);
+ CHECK(status, FAIL, "SDwritedata");
+
+ /* Close data sets */
+ status = SDendaccess(sds_id);
+ CHECK(status, FAIL, "SDendaccess");
+
+ /* Close HDF file */
+ status = SDend(sd_id);
+ CHECK(status, FAIL, "SDend");
+
+ /* Return the number of errors that's been kept track of so far */
+ return num_errs;
+} /* test_setexternal() */
+
+/********************************************************************
+ Name: test_getexternal() - tests getting external file info on
+ various data sets
+
+ Description:
+ The main contents include:
+ - Create and write a data set in the main file only, i.e., non-external
+ data set
+ - Get and verify external file info for external data set and
+ wrapper data set
+ - Verify that there is no external file info from the non-external
+ data set
+
+ Return value:
+ The number of errors that occurred in this routine.
+
+ BMR - Jan 16, 2009
+*********************************************************************/
+static int test_getexternal()
+{
+ int32 sd_id, sds_id, noextsds;
+ intn name_len=0;
+ char *extfile_name;
+ int32 offset=0, length=0;
+ int32 start[2], edges[2], dimsizes[2], nt;
+ int32 idata[DIM1*DIM2];
+ int ii;
+ intn num_errs = 0; /* number of errors so far */
+ intn status = SUCCEED;
+
+ /* Open file 'exttst.hdf' again */
+ sd_id = SDstart(EXTTST, DFACC_RDWR);
+ CHECK(sd_id, FAIL, "SDstart (again)");
+
+ /* Create and write a data set in the main file */
+
+ /* Create data set NOEXTSDS */
+ nt = DFNT_INT32 | DFNT_NATIVE;
+ dimsizes[0] = DIM1;
+ dimsizes[1] = DIM2;
+ noextsds = SDcreate(sd_id, NOEXTSDS, nt, 2, dimsizes);
+ CHECK(noextsds, FAIL, "SDcreate: Failed to create a new data set 'NoExternalDataSet' for testing SDSgetexternalfile on a non-external element");
+
+ /* initialize data to write out */
+ for(ii = 0; ii < 25; ii++)
+ idata[ii] = ii;
+
+ /* Write data to all of data set NOEXTSDS in main file */
+ start[0] = start[1] = 0;
+ edges[0] = edges[1] = DIM1;
+ status = SDwritedata(noextsds, start, NULL, edges, (VOIDP) idata);
+ CHECK(status, FAIL, NOEXTSDS);
+ CHECK(status, FAIL, "SDwritedata");
+
+ /* Close data sets */
+ status = SDendaccess(noextsds);
+ CHECK(status, FAIL, "SDendaccess");
+
+ /*
+ * Test getting external info on an external data set; should return the
+ * external file information successfully.
+ */
+
+ /* Get access to the data set named EXTSDS */
+ sds_id = get_SDSbyName(sd_id, EXTSDS);
+
+ /* Call SDgetexternalfile the first time passing in 0 for external
+ file name length to get the actual length - SDgetexternalfile is
+ deprecated as of 4.2.7 because it lacks the length argument */
+ { /* deprecated */
+ name_len = SDgetexternalfile(sds_id, 0, NULL, NULL);
+ VERIFY(name_len, (intn)HDstrlen(EXTFILE), "SDgetexternalfile");
+
+ extfile_name = (char *) HDmalloc(sizeof(char *) * (name_len+1));
+ CHECK_ALLOC(extfile_name, "extfile_name", "SDgetexternalfile");
+ HDmemset(extfile_name, '\0', name_len+1);
+
+ /* Call SDgetexternalfile again and get the external file info */
+ name_len = SDgetexternalfile(sds_id, name_len+1, extfile_name, &offset);
+ VERIFY(name_len, (intn)HDstrlen(EXTFILE), "SDgetexternalfile");
+ VERIFY_CHAR(EXTFILE, extfile_name, "SDgetexternalfile");
+ }
+
+ /* Call SDgetexternalinfo the first time passing in 0 for external
+ file name length to get the actual length */
+ name_len = SDgetexternalinfo(sds_id, 0, NULL, NULL, NULL);
+ VERIFY(name_len, (intn)HDstrlen(EXTFILE), "SDgetexternalinfo");
+
+ /* Test passing in NULL pointer for external file name buffer, should
+ fail gracefully */
+ {
+ char *null_buffer=NULL;
+ intn ret_code=0;
+ ret_code = SDgetexternalinfo(sds_id, name_len+1, null_buffer, &offset, &length);
+ VERIFY(ret_code, FAIL, "SDgetexternalinfo");
+ }
+
+ /* Prepare buffer for external file name */
+ extfile_name = (char *) HDmalloc(sizeof(char *) * (name_len+1));
+ CHECK_ALLOC(extfile_name, "extfile_name", "SDgetexternalinfo");
+ HDmemset(extfile_name, '\0', name_len+1);
+
+ /* Call SDgetexternalinfo again and get the external file info */
+ name_len = SDgetexternalinfo(sds_id, name_len+1, extfile_name, &offset, &length);
+ VERIFY(name_len, (intn)HDstrlen(EXTFILE), "SDgetexternalinfo");
+ VERIFY_CHAR(EXTFILE, extfile_name, "SDgetexternalinfo");
+
+ /* Test passing in smaller buffer for external file name than actual;
+ name should be truncated */
+ {
+ char *short_name = (char *) HDmalloc(sizeof(char *) * (name_len));
+ HDmemset(short_name, '\0', name_len);
+ HDstrncpy(short_name, EXTFILE, name_len-2);
+ HDmemset(extfile_name, '\0', name_len);
+
+ /* Call SDgetexternalinfo again with smaller buffer size and verify
+ that SDgetexternalinfo reads the name truncated to the given
+ buffer size*/
+ name_len = SDgetexternalinfo(sds_id, name_len-2, extfile_name, &offset, &length);
+ VERIFY(name_len, (intn)HDstrlen(extfile_name), "SDgetexternalinfo");
+ VERIFY_CHAR(short_name, extfile_name, "SDgetexternalinfo");
+ HDfree(short_name);
+ }
+
+ /* Close the data set */
+ status = SDendaccess(sds_id);
+ CHECK(status, FAIL, "SDendaccess");
+
+ /*
+ * Test getting external info on a wrapper data set; should return the
+ * external file information successfully.
+ */
+
+ /* Get access to the data set named WRAPSDS */
+ sds_id = get_SDSbyName(sd_id, WRAPSDS);
+
+ /* Call SDgetexternalinfo the first time passing in 0 for external
+ file name length to get the actual length */
+ name_len = SDgetexternalinfo(sds_id, 0, NULL, NULL, NULL);
+ VERIFY(name_len, (intn)HDstrlen(EXTFILE), "SDgetexternalinfo");
+
+ /* Test passing in NULL pointer for external file name buffer, should
+ fail gracefully */
+ {
+ char *null_buffer=NULL;
+ intn ret_code=0;
+ ret_code = SDgetexternalinfo(sds_id, name_len+1, null_buffer, &offset, &length);
+ VERIFY(ret_code, FAIL, "SDgetexternalinfo");
+ }
+
+ extfile_name = (char *) HDmalloc(sizeof(char *) * (name_len+1));
+ CHECK_ALLOC(extfile_name, "extfile_name", "SDgetexternalinfo");
+ HDmemset(extfile_name, '\0', name_len+1);
+
+ /* Call SDgetexternalinfo again and get the external file info */
+ name_len = SDgetexternalinfo(sds_id, name_len+1, extfile_name, &offset, &length);
+ VERIFY(name_len, (intn)HDstrlen(EXTFILE), "SDgetexternalinfo");
+ VERIFY_CHAR(EXTFILE, extfile_name, "SDgetexternalinfo");
+
+ /*
+ * Test getting external info on a non-external data set; should return
+ * no external file information
+ */
+
+ /* Get access to the data set named NOEXTSDS */
+ noextsds = get_SDSbyName(sd_id, NOEXTSDS);
+
+ /* Call SDgetexternalinfo on the SDS that doesn't have external
+ element, should return 0 for length of external file name */
+ name_len = SDgetexternalinfo(noextsds, 0, NULL, NULL, NULL);
+ VERIFY(name_len, 0, "SDgetexternalinfo");
+
+ status = SDendaccess(noextsds);
+ CHECK(status, FAIL, "SDendaccess");
+
+ /* Close file 'exttst.hdf' */
+ status = SDend(sd_id);
+ CHECK(status, FAIL, "SDend");
+
+ /* Return the number of errors that's been kept track of so far */
+ return num_errs;
+} /* test_getexternal() */
+
+/********************************************************************
+ Name: test_mult_setexternal() - tests setting external multiple times
+
+ Description:
+ The main contents include:
+ - Create an external data set, with its data written to EXTFILE2, then
+ close it and the file.
+ - Re-open the file
+ - Call SDsetexternalfile on the data set, which is already an external SDS.
+ This is to make sure HDFFR-1516 is fixed.
+ - Re-open the file and verify data of each data set.
+
+ Return value:
+ The number of errors that occurred in this routine.
+
+ BMR - Jan 16, 2009
+*********************************************************************/
+int test_mult_setexternal()
+{
+ int32 sd_id, sds1_id, sds2_id, sds3_id, sds4_id;
+ int32 ap_start[3], ap_edges[3], dim_sizes[3];
+ int32 sds1_size=0, sds2_size=0, sds3_size=0;
+ char *extfile_name;
+ intn name_len = 0;
+ intn status = SUCCEED;
+ intn num_errs = 0; /* number of errors so far */
+
+ /* Create the file and initialize the SD interface */
+ sd_id = SDstart (EXTTST, DFACC_CREATE);
+ CHECK(status, FAIL, "SDstart");
+
+ dim_sizes[0] = Z_LENGTH;
+ dim_sizes[1] = Y_LENGTH;
+ dim_sizes[2] = X_LENGTH;
+
+ /* Create data set SDS1 and write data to the external file; the returned
+ value is the size of the data that has been written for this SDS */
+ sds1_size = make_Ext3D_SDS(sd_id, SDS1, DFNT_INT32, 3, dim_sizes, (VOIDP)written_data, OFFSET, EXTFILE2);
+
+ /* Close the file to flush */
+ status = SDend(sd_id);
+ CHECK(status, FAIL, "SDend");
+
+ /* Re-open the file */
+ sd_id = SDstart (EXTTST, DFACC_RDWR);
+ CHECK(status, FAIL, "SDstart");
+
+ /* Move data from an external data set, SDS1, into the external file again.
+ This simulates the situation of the example being run more than once,
+ causing failures in the daily tests. This action should have no effect now.
+ (HDFFR-1521)-BMR */
+
+ /* Select the named data set, id is checked by callee */
+ sds1_id = get_SDSbyName(sd_id, SDS1);
+
+ /* Try to move it to the external file again; should neither fail, nor have
+ any effect. External file name should still be EXTFILE2 */
+ status = SDsetexternalfile (sds1_id, EXTFILE3, OFFSET);
+ if (status < 0)
+ fprintf(stderr, "SDsetexternalfile still fail when called more than once on an SDS\n");
+
+ /* Verify that external file still is EXTFILE2, and not EXTFILE3 */
+
+ /* Call SDgetexternalinfo the first time passing in 0 for external
+ file name length to get the actual length */
+ name_len = SDgetexternalinfo(sds1_id, 0, NULL, NULL, NULL);
+ if (name_len <= 0)
+ fprintf(stderr, "SDsetexternalfile should return length greater than 0\n");
+
+ /* Prepare buffer for external file name */
+ extfile_name = (char *) HDmalloc(sizeof(char) * (name_len+1));
+ CHECK_ALLOC(extfile_name, "extfile_name", "SDgetexternalinfo");
+ HDmemset(extfile_name, '\0', name_len+1);
+
+ /* Call SDgetexternalinfo again and get the external file info */
+ name_len = SDgetexternalinfo(sds1_id, name_len+1, extfile_name, NULL, NULL);
+ VERIFY(name_len, (intn)HDstrlen(EXTFILE2), "SDgetexternalinfo");
+ VERIFY_CHAR(EXTFILE2, extfile_name, "SDgetexternalinfo");
+
+ /* Close the data set and the file */
+ status = SDendaccess(sds1_id);
+ CHECK(status, FAIL, "SDendaccess SDS1");
+ status = SDend(sd_id);
+ CHECK(status, FAIL, "SDend");
+
+ /* Re-open the file to verify written data */
+ sd_id = SDstart (EXTTST, DFACC_RDWR);
+ CHECK(sd_id, FAIL, "SDstart");
+
+ /* Read data of the data set and verify against the original */
+ verify_data(sd_id, 0);
+
+ /* Close the file */
+ status = SDend(sd_id);
+ CHECK(status, FAIL, "SDend");
+
+ /* Return the number of errors that's been kept track of so far */
+ return num_errs;
+} /* test_mult_setexternal() */
+
+/********************************************************************
+ Name: test_special_combos() - tests combining other specialness with
+ external data feature.
+ Description:
+ The main contents include:
+ - Open the file from test_mult_setexternal
+ - Create and write two unlimited-dimension data sets, SDS2 and SDS3
+ - Append data to the first unlimited-dimension data set, SDS2, to
+ create linked-block element.
+ - Move data of the first unlimited-dim data set, SDS2, to the external
+ file to test the combination of SPECIAL_LINKED and SPECIAL_EXT.
+ - Move data of SDS3 to the external file to test unlimited and
+ SPECIAL_EXT without linked-block element.
+ - Make a compressed data set, SDS4, in the main file.
+ - Attempt to move data of SDS4 to the external file; this should fail
+ because the SPECIAL_COMP and SPECIAL_EXT combination is not supported.
+ - Close everything.
+ - Re-open the file and verify data of each data set in the file.
+
+ Return value:
+ The number of errors that occurred in this routine.
+
+ BMR - Jan 16, 2009
+*********************************************************************/
+int test_special_combos()
+{
+ int32 sd_id, sds1_id, sds2_id, sds3_id, sds4_id;
+ int32 sds_size;
+ int32 num_sds = 0, num_attrs = 0;
+ int32 ap_start[3], ap_edges[3], dim_sizes[3];
+ int32 sds1_size=0, sds2_size=0, sds3_size=0, sds4_size=0;
+ intn status = 0;
+ int ii, jj, kk;
+ intn num_errs = 0; /* number of errors in compression test so far */
+
+ /* Create the file and initialize the SD interface */
+ sd_id = SDstart (EXTTST, DFACC_CREATE);
+ CHECK(sd_id, FAIL, "SDstart");
+
+ dim_sizes[0] = SD_UNLIMITED;
+ dim_sizes[1] = Y_LENGTH;
+ dim_sizes[2] = X_LENGTH;
+
+ /* Create and write two unlimited-dimension data sets, SDS2 and SDS3,
+ in the main file. Z_LENGTH is passed for unlimited dimension. */
+ sds2_size = make_SDS(sd_id, SDS2, DFNT_INT32, 3, dim_sizes, Z_LENGTH, (VOIDP)written_data);
+ sds3_size = make_SDS(sd_id, SDS3, DFNT_INT32, 3, dim_sizes, Z_LENGTH, (VOIDP)written_data);
+
+ /* Close the file to flush */
+ status = SDend(sd_id);
+ CHECK(status, FAIL, "SDend");
+
+ /* Re-open the file */
+ sd_id = SDstart (EXTTST, DFACC_RDWR);
+ CHECK(sd_id, FAIL, "SDstart");
+
+ /* Start appending data at the end of the unlimited dimension */
+ ap_start[0] = Z_LENGTH;
+ ap_start[1] = ap_start[2] = 0;
+ ap_edges[0] = 1;
+ ap_edges[1] = Y_LENGTH;
+ ap_edges[2] = X_LENGTH;
+
+ /* Data initialization, a hyperslab 1xY_LENGTHxX_LENGTH */
+ for (kk = 0; kk < ap_edges[0]; kk++)
+ for (jj = 0; jj < ap_edges[1]; jj++)
+ for (ii = 0; ii < ap_edges[2]; ii++)
+ ap_data[kk][jj][ii] = (ii + 1) + (jj + 1) + (kk + 1);
+
+ /* Append data to the unlimited-dimension data set SDS2. This should */
+ /* produce a linked-block element, because SDS3 had been written */
+ sds2_size = append_Data2SDS(sd_id, SDS2, ap_start, ap_edges, (VOIDP)ap_data);
+ CHECK(status, FAIL, "append_Data2SDS");
+
+ /* Select the named data set, id is checked by callee */
+ sds2_id = get_SDSbyName(sd_id, SDS2);
+
+ /* Now, move SDS2's data to the external file, then check its size.
+ This tests the combo: SPECIAL_LINKED and SPECIAL_EXT. 1600 is just an
+ arbitrary offset that is more than enough to go past the existing data.
+ This action verifies the fix in HDFFR-1516: SDsetexternalfile does not
+ fail. Note that there is no test for the case where calling
+ SDsetexternalfile on an external SDS fails, because SDsetexternalfile was
+ modified to have no effect when it is called more than once on an SDS, see
+ HDFFR-1521. */
+ status = SDsetexternalfile (sds2_id, EXTFILE2, 1600);
+ CHECK(status, FAIL, "SDsetexternalfile");
+
+ /* Verify data size */
+ verify_datasize(sds2_id, sds2_size, SDS2);
+
+ /* Move data of an existing contiguous data set to the external file */
+
+ /* Select the named data set, id is checked by callee */
+ sds3_id = get_SDSbyName(sd_id, SDS3);
+
+ /* Move SDS3's data to the external file, then check its size. This also */
+ /* tests moving an existing unlimited-dimension data set to external file */
+ status = SDsetexternalfile(sds3_id, EXTFILE2, 2500); /* random spot */
+ CHECK(status, FAIL, "SDsetexternalfile");
+ verify_datasize(sds3_id, sds3_size, SDS3);
+
+ /* Attempt to move a compressed data set to an external file; should fail */
+
+ /* Create and write to a compressed data set */
+ sds4_size = make_SDS(sd_id, SDS4, DFNT_INT32, 3, dim_sizes, Z_LENGTH, (VOIDP)written_data);
+
+ /* Select the named data set, id is checked by callee */
+ sds4_id = get_SDSbyName(sd_id, SDS4);
+
+ /* Try to move SDS4's data to the external file, should fail because HDF4
+ doesn't support compressed and external data together */
+ status = SDsetexternalfile(sds4_id, EXTFILE2, 4000); /* random spot */
+ CHECK(status, FAIL, "SDsetexternalfile");
+
+ /* Check the data length of each data set */
+ verify_datasize(sds2_id, sds2_size, SDS2);
+ verify_datasize(sds3_id, sds3_size, SDS3);
+ verify_datasize(sds4_id, sds4_size, SDS4);
+
+ status = SDendaccess(sds2_id);
+ CHECK(status, FAIL, "SDendaccess SDS2");
+ status = SDendaccess(sds3_id);
+ CHECK(status, FAIL, "SDendaccess SDS3");
+ status = SDendaccess(sds4_id);
+ CHECK(status, FAIL, "SDendaccess SDS4");
+
+ /* Close the file */
+ status = SDend(sd_id);
+ CHECK(status, FAIL, "SDend");
+
+ /* Re-open the file to verify written data */
+ sd_id = SDstart (EXTTST, DFACC_RDWR);
+ CHECK(sd_id, FAIL, "SDstart");
+
+ status = SDfileinfo(sd_id, &num_sds, &num_attrs);
+ CHECK(status, FAIL, "SDfileinfo");
+
+ /* Read data of each data set and verify it against the original */
+ for (ii = 0; ii < num_sds; ii++)
+ {
+ verify_data(sd_id, ii);
+ }
+
+ /* Close the file */
+ status = SDend(sd_id);
+ CHECK(status, FAIL, "SDend");
+
+ /* Return the number of errors that's been kept track of so far */
+ return num_errs;
+} /* test_special_combos() */
+
+/* Test driver for testing external file functions */
+ /* extern int test_external()
+ */
+int test_external()
+{
+ int ii, jj, kk;
+ intn num_errs = 0; /* number of errors */
+
+ /* Data initialization */
+ for (kk = 0; kk < Z_LENGTH; kk++)
+ for (jj = 0; jj < Y_LENGTH; jj++)
+ for (ii = 0; ii < X_LENGTH; ii++)
+ written_data[kk][jj][ii] = (ii + 1) + (jj + 1) + (kk + 1);
+
+ /* Output message about test being performed */
+ TESTING("external file functions (texternal.c)");
+
+ /* Test SDsetexternalfile basic functionality (modified from hdftest.c) */
+ num_errs = num_errs + test_setexternal();
+
+ /* Test SDgetexternalinfo basic functionality */
+ num_errs = num_errs + test_getexternal();
+
+ /* Test that calling SDsetexternalfile repeatedly will not fail (HDFFR-1516)
+ -BMR, 10/29/15 */
+ num_errs = num_errs + test_mult_setexternal();
+
+ /* Test multiple specialness */
+ num_errs = num_errs + test_special_combos();
+
+ if (num_errs == 0) PASSED();
+ return num_errs;
+}
+
+/*********************** Local utility functions ***********************/
+
+/********************************************************************
+ Name: verify_data() - Verifies the written data, given the SDS' index.
+ Description:
+ Calls SDselect, SDgetinfo, and SDreaddata to verify the sds_ind'th
+ data against the original buffer.
+ Return value:
+ None.
+ BMR - Dec 1, 2015
+*********************************************************************/
+void verify_data(int32 sd_id, int32 sds_ind)
+{
+ int32 sds_id;
+ int32 *ptr;
+ char name[80];
+ int32 data_size, rank1;
+ int32 start[3], edges[3], dims[3];
+ intn status;
+ int32 *outdata = NULL, num_elems;
+ int32 outd[140];
+ intn num_errs = 0; /* number of errors in compression test so far */
+
+ /* Select the data set. */
+ sds_id = SDselect (sd_id, sds_ind);
+ CHECK(status, FAIL, "SDselect");
+
+ /* Set the parameters start and edges to read */
+ edges[1] = Y_LENGTH;
+ edges[2] = X_LENGTH;
+ start[0] = start[1] = start[2] = 0;
+
+ /* Get the name of the data set */
+ status = SDgetinfo(sds_id, name, &rank1, dims, NULL, NULL);
+ CHECK(status, FAIL, "SDgetinfo");
+
+ /* The data set SDS2 has appended data so the written data is different
+ from the rest of the data sets in the file */
+ if (!HDstrncmp(name, SDS2, HDstrlen(SDS2)))
+ {
+ /* Buffer for first written data + appended data */
+ int32 data_wappended[Z_LENGTH+1][Y_LENGTH][X_LENGTH];
+
+ /* Number of elements in first written data + appended data */
+ num_elems = Z_LENGTH*Y_LENGTH*X_LENGTH + 1*Y_LENGTH*X_LENGTH;
+
+ /* Copy buffer of first written data to data_wappended */
+ HDmemcpy(data_wappended, written_data, (Z_LENGTH*Y_LENGTH*X_LENGTH)*sizeof(int));
+
+ /* Forward to the end of first written data */
+ ptr = &data_wappended[Z_LENGTH][0][0];
+
+ /* Copy appended data to data_wappended */
+ HDmemcpy(ptr, ap_data, (1*Y_LENGTH*X_LENGTH)*sizeof(int));
+
+ /* Back to the beginning of data_wappended */
+ ptr = &data_wappended[0][0][0];
+
+ /* Size of data written including appended data */
+ data_size = ((Z_LENGTH+1) * Y_LENGTH*X_LENGTH)*sizeof(int);
+ edges[0] = Z_LENGTH + 1;
+
+ } /* with appended data */
+
+ /* Everyone else */
+ else
+ {
+ /* Point to written data buffer */
+ ptr = &written_data[0][0][0];
+
+ /* Number of elements */
+ num_elems = Z_LENGTH*Y_LENGTH*X_LENGTH;
+
+ /* Size of data written */
+ data_size = num_elems * sizeof(int);
+ edges[0] = Z_LENGTH;
+ }
+
+ /* Allocate buffer for reading, after establishing the data size */
+ outdata = (int32 *) HDmalloc(data_size);
+
+ /* Read the entire sds and verify that the data is as the original buffer */
+ status = SDreaddata(sds_id, start, NULL, edges, (VOIDP) outdata);
+ CHECK(status, FAIL, "SDreaddata");
+
+ /* Verify that data is correct comparing against the written data */
+ {
+ int ii;
+ int32* out;
+ out = &outdata[0];
+
+ for (ii = 0; ii < num_elems; ii++, ptr++, out++)
+ if (*ptr != *out)
+ {
+ fprintf(stderr, "Data read (%d) is different than written (%d) for SDS #%d, name = %s\n", *out, *ptr, sds_ind, name);
+ }
+ }
+
+ /* Free the read buffer and terminate access to the data set */
+ HDfree(outdata);
+ status = SDendaccess (sds_id);
+ CHECK(status, FAIL, "SDendaccess");
+
+} /* verify_data */
+#endif /* HDF */
+
diff --git a/mfhdf/test/tfile.c b/mfhdf/test/tfile.c
index a9ba0a5..eb0425e 100644
--- a/mfhdf/test/tfile.c
+++ b/mfhdf/test/tfile.c
@@ -11,10 +11,6 @@
* access to either file, you may request a copy from help at hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#ifdef RCSID
-static char RcsId[] = "@(#)$Revision: 5218 $";
-#endif
-
/* $Id: tfile.c 5218 2009-08-29 04:21:49Z bmribler $ */
#include "mfhdf.h"
@@ -24,36 +20,36 @@ static char RcsId[] = "@(#)$Revision: 5218 $";
#include "hdftest.h"
/********************************************************************
- Name: test_file_inuse() - tests preventing of an in-use file being
- removed at cleanup time.
-
- Description:
- Sometime, when an error occurs, the cleanup process attempts to
- remove a file, which might still be in use (part of bugzilla #376.)
- The routine test_file_inuse is to test the fix that provides the
- underlaying call to HPisfile_in_use, which should successfully
- determines whether a file is still in use before an attempt to remove.
-
- The main contents include:
- - a loop that repeatedly calls SDstart/DFACC_CREATE; only the first
- SDstart succeeds, the subsequent ones should fail.
- - SDcreate, SDwritedata, SDendaccess follow
- - outside of that loop is another loop to call SDend corresponding
- to the previous SDstart's
- - then, at the end, the file will be reopened; if the file doesn't
- exist and causes SDstart to fail, the test will fail.
-
- Before the fix, when the 2nd SDstart/DFACC_CREATE was called and
- failed because the file was being in use from the first call to
- SDstart/DFACC_CREATE, the cleaning process removed the file.
+ Name: test_file_inuse() - tests preventing of an in-use file being
+ removed at cleanup time.
+
+ Description:
+ Sometimes, when an error occurs, the cleanup process attempts to
+ remove a file which might still be in use (part of bugzilla #376).
+ The routine test_file_inuse tests the fix that provides the
+ underlying call to HPisfile_in_use, which should successfully
+ determine whether a file is still in use before an attempt to remove it.
+
+ The main contents include:
+ - a loop that repeatedly calls SDstart/DFACC_CREATE; only the first
+ SDstart succeeds, the subsequent ones should fail.
+ - SDcreate, SDwritedata, SDendaccess follow
+ - outside of that loop is another loop to call SDend corresponding
+ to the previous SDstart's
+ - then, at the end, the file will be reopened; if the file doesn't
+ exist and causes SDstart to fail, the test will fail.
+
+ Before the fix, when the 2nd SDstart/DFACC_CREATE was called and
+ failed because the file was still in use from the first call to
+ SDstart/DFACC_CREATE, the cleaning process removed the file.
Return value:
- The number of errors occurred in this routine.
+ The number of errors that occurred in this routine.
BMR - Jun 22, 2005
*********************************************************************/
-#define FILE_NAME "bug376.hdf" /* data file to test */
+#define FILE_NAME "bug376.hdf" /* data file to test */
#define DIM0 10
static intn
@@ -71,12 +67,12 @@ test_file_inuse()
{
/* Create and open the file and initiate the SD interface. */
sd_id[i] = SDstart(FILE_NAME, DFACC_CREATE);
- if (i == 0) {
- CHECK(sd_id[i], FAIL, "SDstart"); } /* 1st SDstart must pass */
- else {
- VERIFY(sd_id[i], FAIL, "SDstart"); }
- /* subsequent SDstart should fail, which causes the following calls
- to fail as well */
+ if (i == 0) {
+ CHECK(sd_id[i], FAIL, "SDstart"); } /* 1st SDstart must pass */
+ else {
+ VERIFY(sd_id[i], FAIL, "SDstart"); }
+ /* subsequent SDstart should fail, which causes the following calls
+ to fail as well */
/* Define the rank and dimensions of the data sets to be created. */
rank = 1;
@@ -86,29 +82,29 @@ test_file_inuse()
/* Create the array data set. */
sds_id[i] = SDcreate(sd_id[i], names[i], DFNT_INT16, rank, dims);
- if (i == 0) {
- CHECK(sds_id[i], FAIL, "SDcreate"); } /* 1st SDcreate must pass */
- else
- VERIFY(sds_id[i], FAIL, "SDcreate");
+ if (i == 0) {
+ CHECK(sds_id[i], FAIL, "SDcreate"); } /* 1st SDcreate must pass */
+ else
+ VERIFY(sds_id[i], FAIL, "SDcreate");
/* Fill the stored-data array with values. */
for (j = 0; j < DIM0; j++) {
array_data[j] = (i + 1)*(j + 1);
}
- /* Write data to the data set */
- statusn = SDwritedata(sds_id[i], start, NULL, edges, (VOIDP)array_data);
- if (i == 0) {
- CHECK(statusn, FAIL, "SDwritedata"); } /* 1st SDwritedata must pass */
- else
- VERIFY(statusn, FAIL, "SDwritedata");
+ /* Write data to the data set */
+ statusn = SDwritedata(sds_id[i], start, NULL, edges, (VOIDP)array_data);
+ if (i == 0) {
+ CHECK(statusn, FAIL, "SDwritedata"); } /* 1st SDwritedata must pass */
+ else
+ VERIFY(statusn, FAIL, "SDwritedata");
/* Terminate access to the data sets. */
statusn = SDendaccess(sds_id[i]);
- if (i == 0) {
- CHECK(statusn, FAIL, "SDendaccess"); } /* 1st SDendaccess must pass */
- else
- VERIFY(statusn, FAIL, "SDendaccess");
+ if (i == 0) {
+ CHECK(statusn, FAIL, "SDendaccess"); } /* 1st SDendaccess must pass */
+ else
+ VERIFY(statusn, FAIL, "SDendaccess");
} /* for i */
@@ -116,10 +112,10 @@ test_file_inuse()
{
/* Terminate access to the SD interface and close the file. */
statusn = SDend (sd_id[i]);
- if (i == 0) {
- CHECK(statusn, FAIL, "SDend"); } /* 1st SDend must pass */
- else
- VERIFY(statusn, FAIL, "SDend");
+ if (i == 0) {
+ CHECK(statusn, FAIL, "SDend"); } /* 1st SDend must pass */
+ else
+ VERIFY(statusn, FAIL, "SDend");
}
/* Try to open the file, which should exist */
@@ -134,39 +130,39 @@ test_file_inuse()
/********************************************************************
Name: test_max_open_files() - tests the new API SDreset_maxopenfiles,
- SDget_maxopenfiles, SDget_numopenfiles,
- and SDgetfilename.
-
- Description:
- There were multiple requests from the users to increase the maximum
- number of opened files allowed. SDreset_maxopenfiles is added to
- allow the user to reset that value. The current default value is 32.
- This API can be called anytime to increase it. This test routine will
- carry out the following tests:
-
- - Get the current max, should be the default (32,) and the system limit
- - Reset current max to an arbitrary number that is larger than the
- default and verify
- - Try to create more files than the current max and all should
- succeed, because NC_open resets the current max to system limit
- automatically, when the number of opened files exceeds the current
- max
- - Get the current max and system limit and verify, current max
- should be the system limit
- - Get the current max another way, it should be the system limit again
- - Get the current number of files being opened
- - Reset current max to a value that is smaller than the current
- number of opened files; it shouldn't reset
- - Reset current max again to a value that is smaller than the
- current max but larger than the current number of opened files,
- that should work for there is no information loss
- - Try to create more files up to the system limit or NUM_FILES_HI,
- because the arrays have max NUM_FILES_HI elements in this test
- - Close all the files, then try opening all again to verify their
- names, this is to test bugzilla 440
+ SDget_maxopenfiles, SDget_numopenfiles,
+ and SDgetfilename.
+
+ Description:
+ There were multiple requests from the users to increase the maximum
+ number of opened files allowed. SDreset_maxopenfiles is added to
+ allow the user to reset that value. The current default value is 32.
+ This API can be called anytime to increase it. This test routine will
+ carry out the following tests:
+
+ - Get the current max, should be the default (32,) and the system limit
+ - Reset current max to an arbitrary number that is larger than the
+ default and verify
+ - Try to create more files than the current max and all should
+ succeed, because NC_open resets the current max to system limit
+ automatically, when the number of opened files exceeds the current
+ max
+ - Get the current max and system limit and verify, current max
+ should be the system limit
+ - Get the current max another way, it should be the system limit again
+ - Get the current number of files being opened
+ - Reset current max to a value that is smaller than the current
+ number of opened files; it shouldn't reset
+ - Reset current max again to a value that is smaller than the
+ current max but larger than the current number of opened files,
+ that should work for there is no information loss
+ - Try to create more files up to the system limit or NUM_FILES_HI,
+ because the arrays have max NUM_FILES_HI elements in this test
+ - Close all the files, then try opening all again to verify their
+ names, this is to test bugzilla 440
Return value:
- The number of errors occurred in this routine.
+ The number of errors that occurred in this routine.
BMR - Oct 14, 2005
*********************************************************************/
@@ -176,16 +172,16 @@ test_file_inuse()
static int test_max_open_files()
{
- int32 fids[NUM_FILES_HI]; /* holds IDs of opened files */
- char filename[NUM_FILES_HI][10]; /* holds generated file names */
- char readfname[H4_MAX_NC_NAME]; /* file name retrieved from file id */
+ int32 fids[NUM_FILES_HI]; /* holds IDs of opened files */
+ char filename[NUM_FILES_HI][10]; /* holds generated file names */
+ char readfname[H4_MAX_NC_NAME]; /* file name retrieved from file id */
intn index, status,
- curr_max, /* curr maximum number of open files allowed in HDF */
- sys_limit, /* maximum number of open files allowed by system */
- curr_max_bk, /* back up of curr_max */
- curr_opened, /* number of files currently being opened */
- temp_limit, /* temp var - num of files to be opened in this test */
- num_errs = 0; /* number of errors so far */
+ curr_max, /* curr maximum number of open files allowed in HDF */
+ sys_limit, /* maximum number of open files allowed by system */
+ curr_max_bk, /* back up of curr_max */
+ curr_opened, /* number of files currently being opened */
+ temp_limit, /* temp var - num of files to be opened in this test */
+ num_errs = 0; /* number of errors so far */
/* Get the current max and system limit */
status = SDget_maxopenfiles(&curr_max, &sys_limit);
@@ -194,17 +190,16 @@ static int test_max_open_files()
/* Reset current max to an arbitrary number and check */
curr_max = SDreset_maxopenfiles(33);
- CHECK(status, FAIL, "test_maxopenfiles: SDreset_maxopenfiles");
VERIFY(curr_max, 33, "test_maxopenfiles: SDreset_maxopenfiles");
/* Try to create more files than the default max (currently, 32) and
all should succeed */
for (index=0; index < NUM_FILES_LOW; index++)
{
- /* Create a file */
- sprintf(filename[index], "file%i", index);
- fids[index] = SDstart(filename[index], DFACC_CREATE);
- CHECK(fids[index], FAIL, "test_maxopenfiles: SDstart");
+ /* Create a file */
+ sprintf(filename[index], "file%i", index);
+ fids[index] = SDstart(filename[index], DFACC_CREATE);
+ CHECK(fids[index], FAIL, "test_maxopenfiles: SDstart");
}
/* Verify that NUM_FILES_LOW files are opened */
@@ -240,14 +235,14 @@ static int test_max_open_files()
curr_opened = SDget_numopenfiles();
VERIFY(curr_opened, NUM_FILES_LOW, "test_maxopenfiles: SDget_numopenfiles");
- /* Reset current max to a value that is smaller than the current
+ /* Reset current max to a value that is smaller than the current
number of opened files; it shouldn't reset */
curr_max_bk = curr_max;
curr_max = SDreset_maxopenfiles(curr_opened-1);
VERIFY(curr_max, curr_max_bk, "test_maxopenfiles: SDreset_maxopenfiles");
- /* Reset current max again to a value that is smaller than the
- current max but larger than the current number of opened files,
+ /* Reset current max again to a value that is smaller than the
+ current max but larger than the current number of opened files,
that should work for there is no information loss */
curr_max = SDreset_maxopenfiles(curr_opened+3);
VERIFY(curr_max, curr_opened+3, "test_maxopenfiles: SDreset_maxopenfiles");
@@ -259,8 +254,8 @@ static int test_max_open_files()
for (index=NUM_FILES_LOW; index < temp_limit; index++)
{
/* Create a file */
- sprintf(filename[index], "file%i", index);
- fids[index] = SDstart(filename[index], DFACC_CREATE);
+ sprintf(filename[index], "file%i", index);
+ fids[index] = SDstart(filename[index], DFACC_CREATE);
/* if SDstart fails due to "too many open files," then adjust
temp_limit so that further failure can be prevented, i.e.
@@ -274,53 +269,53 @@ static int test_max_open_files()
CHECK(fids[index], FAIL, "test_maxopenfiles: SDstart");
}
- /* Close all the files, then try opening all again to verify their
+ /* Close all the files, then try opening all again to verify their
names, this is to test bugzilla 440 */
for (index=0; index < temp_limit; index++)
{
- status = SDend(fids[index]);
- CHECK(status, FAIL, "test_maxopenfiles: SDend");
+ status = SDend(fids[index]);
+ CHECK(status, FAIL, "test_maxopenfiles: SDend");
- fids[index] = SDstart(filename[index], DFACC_RDWR);
- CHECK(fids[index], FAIL, "test_maxopenfiles: SDstart");
+ fids[index] = SDstart(filename[index], DFACC_RDWR);
+ CHECK(fids[index], FAIL, "test_maxopenfiles: SDstart");
}
/* Verify their names */
for (index=0; index < temp_limit; index++)
{
- status = SDgetfilename(fids[index], readfname);
- CHECK(status, FAIL, "test_maxopenfiles: SDgetfilename");
-
- /* Verify the file name retrieved against the original */
- if (HDstrcmp(readfname, filename[index]))
- {
- fprintf(stderr, "SDgetfilename: incorrect file being opened - expected <%s>, retrieved <%s>\n", filename[index], readfname);
- }
+ status = SDgetfilename(fids[index], readfname);
+ CHECK(status, FAIL, "test_maxopenfiles: SDgetfilename");
+
+ /* Verify the file name retrieved against the original */
+ if (HDstrcmp(readfname, filename[index]))
+ {
+ fprintf(stderr, "SDgetfilename: incorrect file being opened - expected <%s>, retrieved <%s>\n", filename[index], readfname);
+ }
}
/* Close then remove all the files */
for (index=0; index < temp_limit; index++)
{
- status = SDend(fids[index]);
- CHECK(status, FAIL, "test_maxopenfiles: SDend");
- remove(filename[index]);
+ status = SDend(fids[index]);
+ CHECK(status, FAIL, "test_maxopenfiles: SDend");
+ remove(filename[index]);
}
return num_errs;
}
/********************************************************************
Name: test_longfilename() - tests that the library can handle a very
- long file name (bugzilla 1331.)
+ long file name (bugzilla 1331.)
- Description:
- The main contents include:
- - create a file with very long name
- - create a dataset and close it (this is to activate the failure)
- - SDend to close the file, and it would cause segfault before the
- fix was applied.
+ Description:
+ The main contents include:
+ - create a file with very long name
+ - create a dataset and close it (this is to activate the failure)
+ - SDend to close the file, and it would cause segfault before the
+ fix was applied.
Return value:
- The number of errors occurred in this routine.
+ The number of errors that occurred in this routine.
BMR - Jan 16, 2009
*********************************************************************/
@@ -358,7 +353,78 @@ test_longfilename()
CHECK(status, FAIL, "test_longfilename: SDendaccess");
status = SDend(fid);
- CHECK(fid, FAIL, "test_longfilename: SDend");
+ CHECK(status, FAIL, "test_longfilename: SDend");
+
+ return num_errs;
+}
+
+
+/********************************************************************
+ Name: test_fileformat() - tests that a file format can be
+ determined (HDFFR-1519)
+
+ Description:
+ The main contents include:
+ - call Hishdf() on an hdf file and a non-hdf file
+ - call HDiscdf() on a cdf file and a non-cdf file
+ - call HDisnetcdf() on a netCDF file and a non-netCDF file
+ - call HDisnetcdf64() on a 64-bit netCDF file and a classic netCDF file
+
+ Return value:
+ The number of errors that occurred in this routine.
+
+ BMR - Jun 06, 2016
+*********************************************************************/
+
+static int
+test_fileformat()
+{
+ int32 fid; /* file id */
+ intn ishdf = 0; /* true if file has HDF format */
+ intn isnetcdf = 0; /* true if file has classic netCDF format */
+ intn isnetcdf64 = 0; /* true if file has 64-bit netCDF format */
+ intn num_errs = 0; /* number of errors so far */
+ char testfile[512] = "";
+ char *hdf_basename = "hdffile.hdf"; /* hdf file to test */
+ char *netcdf1_basename = "Roy.nc"; /* classic netCDF file to test */
+ char *netcdf2_basename = "Roy-64.nc"; /* netCDF 64-bit file to test */
+ intn status = 0; /* status returned by called functions */
+
+ /* Create an empty HDF file to test Hishdf. */
+ fid = SDstart(hdf_basename, DFACC_CREATE);
+ CHECK(fid, FAIL, "SDstart");
+ status = SDend(fid);
+ CHECK(status, FAIL, "test_longfilename: SDend");
+
+ /* Verify that this is an HDF file */
+ ishdf = Hishdf(hdf_basename);
+ VERIFY(ishdf, TRUE, "test_fileformat: Hishdf");
+
+ /* Verify that this is not a netCDF 64-bit file */
+ isnetcdf64 = HDisnetcdf64(hdf_basename);
+ VERIFY(isnetcdf64, FALSE, "test_fileformat: HDisnetcdf64");
+
+ /* Make the name of the classic netCDF file */
+ make_datafilename(netcdf1_basename, testfile, sizeof(testfile));
+
+ /* Verify that this is not an HDF file */
+ ishdf = Hishdf(testfile);
+ VERIFY(ishdf, FALSE, "test_fileformat: Hishdf");
+
+ /* Verify that this is a classic netCDF file */
+ isnetcdf = HDisnetcdf(testfile);
+ VERIFY(isnetcdf, TRUE, "test_fileformat: HDisnetcdf");
+
+ /* Make the name of the netCDF 64-bit file */
+ make_datafilename(netcdf2_basename, testfile, sizeof(testfile));
+
+ /* Verify that this is a netCDF 64-bit file */
+ isnetcdf64 = HDisnetcdf64(testfile);
+ VERIFY(isnetcdf64, TRUE, "test_fileformat: HDisnetcdf64");
+
+ /* Verify that this is not a classic netCDF file */
+ isnetcdf = HDisnetcdf(testfile);
+ VERIFY(isnetcdf, FALSE, "test_fileformat: HDisnetcdf");
return num_errs;
}
@@ -373,17 +439,20 @@ test_files()
/* Output message about test being performed */
TESTING("miscellaneous file related functions (tfile.c)");
- /* test that an in-use file is not removed in certain failure
+ /* test that an in-use file is not removed in certain failure
cleanup. 06/21/05 - bugzilla 376 - BMR */
num_errs = num_errs + test_file_inuse();
- /* test APIs that were added for fixing bugzilla 396 and 440.
+ /* test APIs that were added for fixing bugzilla 396 and 440.
09/07/05 - BMR */
num_errs = num_errs + test_max_open_files();
/* test the fix of bugzilla 1331. 01/16/09 - BMR */
num_errs = num_errs + test_longfilename();
+ /* test the fix of JIRA HDFFR-1519. 06/06/16 - BMR */
+ num_errs = num_errs + test_fileformat();
+
if (num_errs == 0) PASSED();
return num_errs;
}
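The new test_fileformat() above exercises the format probes Hishdf, HDisnetcdf, and HDisnetcdf64. Below is a minimal sketch of how an application might combine them, assuming the probes are reachable through mfhdf.h; the helper name report_format and the order of the checks are illustrative only, not part of the patch.

    #include <stdio.h>
    #include "mfhdf.h"

    /* Illustrative helper: classify a file with the probes used above */
    static void report_format(char *path)
    {
        if (Hishdf(path))
            printf("%s is an HDF file\n", path);
        else if (HDisnetcdf64(path))
            printf("%s is a 64-bit offset netCDF file\n", path);
        else if (HDisnetcdf(path))
            printf("%s is a classic netCDF file\n", path);
        else
            printf("%s is neither an HDF nor a netCDF file\n", path);
    }
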
diff --git a/mfhdf/test/tsdsprops.c b/mfhdf/test/tsdsprops.c
index 1d36370..19f1e2d 100644
--- a/mfhdf/test/tsdsprops.c
+++ b/mfhdf/test/tsdsprops.c
@@ -583,7 +583,7 @@ test_valid_args()
#define D3_Z 2
/* Helper function to test_valid_args2 creates and writes to a dataset */
-intn makeSDS(int32 sd_id, char* name, int32 dtype, int32 rank,
+static intn makeSDS(int32 sd_id, char* name, int32 dtype, int32 rank,
int32* dimsizes, int32* start, int32* strides,
int32* count, void* data)
{
diff --git a/mfhdf/test/tunlim.c b/mfhdf/test/tunlim.c
index 4417774..1643dcc 100644
--- a/mfhdf/test/tunlim.c
+++ b/mfhdf/test/tunlim.c
@@ -95,7 +95,7 @@ int verify_info_data(
/* Verify read data by comparing the output buffer against expected data */
status = HDmemcmp(outdata, result, edges[0] * SIZE_INT16);
if (status != 0)
- fprintf(stderr,"%s: Read data doesn't match input\n");
+ fprintf(stderr,"For SDS %s: Read data doesn't match input\n", ds_name);
/* Return the number of errors occurred here */
return(num_errs);
diff --git a/mfhdf/test/tutils.c b/mfhdf/test/tutils.c
index 2def652..4e50474 100644
--- a/mfhdf/test/tutils.c
+++ b/mfhdf/test/tutils.c
@@ -11,11 +11,20 @@
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
#include "mfhdf.h"
+
+#ifdef HDF
+
#include "hdftest.h"
-/* Generate the correct name for the test file, by prepending the source path
- if it exists, otherwise, assume it is the local directory */
- /* NOTE: should move all utilities into mfutil.c or something like that */
+/********************************************************************
+ Name: make_datafilename() - Generates the correct name for the test file.
+ Description:
+ Generate the correct name for the test file by prepending the source
+ path if it exists, otherwise, assume it is the local directory.
+ Return value:
+ Returns SUCCEED if the file name is generated successfully, or
+ FAIL, otherwise.
+*********************************************************************/
intn make_datafilename(char* basename, char* testfile, unsigned int size)
{
char *srcdir = getenv("srcdir");
@@ -54,3 +63,303 @@ intn make_datafilename(char* basename, char* testfile, unsigned int size)
HDstrcpy(testfile, tempfile);
return SUCCEED;
}
+
+
+/********************************************************************
+ Name: make_SDS() - Creates and writes a 3-D unlimited SDS.
+ Description:
+ Calls SDcreate, SDwritedata, and SDendaccess to create a 3-D
+ unlimited SDS, then close it.
+ (Note: should be modified to support other ranks.)
+ Return value:
+ Returns the size of the data that had been written successfully.
+ BMR - Dec 1, 2015
+*********************************************************************/
+int32 make_SDS(int32 sd_id, char* sds_name, int32 type, int32 rank,
+ int32* dim_sizes, int32 unlim_dim, VOIDP written_data)
+{
+ int32 sds_id;
+ int32 *start, *edges;
+ int32 sds_size = 0, count = 0;
+ intn status, ii;
+ intn num_errs = 0; /* number of errors in compression test so far */
+
+ start = (int32*)HDmalloc(sizeof(int32) * rank);
+ edges = (int32*)HDmalloc(sizeof(int32) * rank);
+
+ /* Create the array with the name defined in SDS_NAME */
+ sds_id = SDcreate (sd_id, sds_name, type, rank, dim_sizes);
+ CHECK(sds_id, FAIL, "SDcreate");
+
+ /* Set the parameters start and edges to write */
+ for (ii = 0; ii < rank; ii++)
+ {
+ start[ii] = 0;
+ edges[ii] = dim_sizes[ii];
+ }
+
+ /* Give real size to the unlimited dimension */
+ if (dim_sizes[0] == SD_UNLIMITED)
+ edges[0] = unlim_dim;
+
+ /* Write the data */
+ status = SDwritedata (sds_id, start, NULL, edges, written_data);
+ CHECK(status, FAIL, "SDwritedata");
+
+ /* Calculate data set's size to verify later */
+ for (ii = 0; ii < rank; ii++)
+ {
+ if (ii == 0)
+ count = edges[0];
+ else
+ count = count * edges[ii];
+ }
+ sds_size = count * DFKNTsize(type);
+
+ /* Terminate access to the data set */
+ status = SDendaccess (sds_id);
+ CHECK(status, FAIL, "SDendaccess");
+
+ HDfree(edges);
+ HDfree(start);
+
+ return(sds_size);
+
+} /* make_SDS */
+
+/********************************************************************
+ Name: make_CompSDS() - Creates and writes a 3-D compressed SDS.
+ Description:
+ Calls SDcreate, SDsetcompress, SDwritedata, and SDendaccess to
+ create an SDS with SKPHUFF compression, then close it.
+ (Note: should be modified for different ranks.)
+ Return value:
+ Returns the size of the data that had been written successfully.
+ BMR - Dec 1, 2015
+*********************************************************************/
+int32 make_CompSDS(int32 sd_id, char* sds_name, int32 type, int32 rank,
+ int32* dim_sizes, VOIDP written_data)
+{
+ int32 sds_id;
+ int32 *start, *edges;
+ comp_coder_t comp_type; /* Compression flag */
+ comp_info c_info; /* Compression structure */
+ int32 sds_size = 0, count = 0;
+ intn status, ii;
+ intn num_errs = 0; /* number of errors in compression test so far */
+
+ start = (int32*)HDmalloc(sizeof(int32) * rank);
+ edges = (int32*)HDmalloc(sizeof(int32) * rank);
+
+ /* Define dimensions of the array to be created */
+ /* dim_sizes[0] = Z_LENGTH;
+ dim_sizes[1] = Y_LENGTH;
+ dim_sizes[2] = X_LENGTH;
+ */
+
+ /* Create the array with the name defined in SDS_NAME */
+ sds_id = SDcreate (sd_id, sds_name, type, rank, dim_sizes);
+ CHECK(status, FAIL, "SDcreate");
+
+ /* Set compression for the data set */
+ comp_type = COMP_CODE_SKPHUFF;
+ c_info.skphuff.skp_size = 18;
+ status = SDsetcompress (sds_id, comp_type, &c_info);
+ CHECK(status, FAIL, "SDsetcompress");
+
+ /* Set the parameters start and edges to write */
+ for (ii = 0; ii < rank; ii++)
+ {
+ start[ii] = 0;
+ edges[ii] = dim_sizes[ii];
+ }
+
+ /* Write the data */
+ status = SDwritedata (sds_id, start, NULL, edges, written_data);
+ CHECK(status, FAIL, "SDwritedata");
+
+ /* Compute the uncompressed data size, just to have makeCompSDS similar
+ to the other create SDS functions; we don't need to verify the data
+ size because we don't move compressed data to external file */
+ for (ii = 0; ii < rank; ii++)
+ {
+ count = count + dim_sizes[ii];
+ }
+ sds_size = count * DFKNTsize(type);
+
+ /* Terminate access to the data set */
+ status = SDendaccess (sds_id);
+ CHECK(status, FAIL, "SDendaccess");
+
+ HDfree(edges);
+ HDfree(start);
+
+ return(sds_size);
+} /* make_CompSDS */
+
+/********************************************************************
+ Name: make_Ext3D_SDS() - Creates and writes a 3D SDS with external data.
+ Description:
+ Calls SDcreate, SDsetexternalfile, SDwritedata, and SDendaccess
+ to create a 3-D SDS with external data storage, then close it.
+ (Note: should be modified for different ranks.)
+ Return value:
+ Returns the size of the data that has been written successfully.
+ BMR - Dec 1, 2015
+*********************************************************************/
+int32 make_Ext3D_SDS(int32 sd_id, char* sds_name, int32 type, int32 rank,
+ int32* dim_sizes, VOIDP written_data,
+ int32 offset, char* ext_file_name)
+{
+ int32 sds_id;
+ int32 *start, *edges;
+ int32 sds_size = 0, count;
+ intn status, ii;
+ intn num_errs = 0; /* number of errors in compression test so far */
+
+ start = (int32*)HDmalloc(sizeof(int32) * rank);
+ edges = (int32*)HDmalloc(sizeof(int32) * rank);
+
+ /* Set the parameters start and edges to write */
+ for (ii = 0; ii < rank; ii++)
+ {
+ start[ii] = 0;
+ edges[ii] = dim_sizes[ii];
+ }
+
+ /* Define dimensions of the array to be created. */
+ /* dim_sizes[0] = Z_LENGTH;
+ dim_sizes[1] = Y_LENGTH;
+ dim_sizes[2] = X_LENGTH;
+ */
+
+ /* Create the array with the name defined in SDS_NAME. */
+ sds_id = SDcreate (sd_id, sds_name, type, rank, dim_sizes);
+ CHECK(status, FAIL, "SDcreate");
+
+ status = SDsetexternalfile (sds_id, ext_file_name, offset);
+ CHECK(status, FAIL, "SDsetexternalfile");
+
+ /* Write the data */
+ status = SDwritedata (sds_id, start, NULL, edges, written_data);
+ CHECK(status, FAIL, "SDwritedata");
+
+ /* Calculate data set's size to verify later */
+ count = 1;
+ for (ii = 0; ii < rank; ii++)
+ {
+ count = count * dim_sizes[ii];
+ }
+ sds_size = count * DFKNTsize(type);
+
+
+ /* Terminate access to the data set */
+ status = SDendaccess (sds_id);
+ CHECK(status, FAIL, "SDendaccess");
+
+ HDfree(edges);
+ HDfree(start);
+
+ return sds_size;
+} /* make_Ext3D_SDS */
+
+/********************************************************************
+ Name: get_SDSbyName() - Find and open an SDS by name.
+ Description:
+ Calls SDnametoindex and SDselect to open a data set by its name.
+ Return value:
+ Returns the SDS' identifier.
+ BMR - Dec 1, 2015
+*********************************************************************/
+int32 get_SDSbyName(int32 sd_id, char* sds_name)
+{
+ int32 sds_id, sds_index;
+ intn status;
+ intn num_errs = 0; /* number of errors in compression test so far */
+
+ sds_index = SDnametoindex(sd_id, sds_name);
+ CHECK(sds_index, FAIL, "SDnametoindex");
+
+ /* Select the data set */
+ sds_id = SDselect (sd_id, sds_index);
+ CHECK(sds_id, FAIL, "SDselect");
+
+ /* Return the data set id */
+ return(sds_id);
+
+} /* get_SDSbyName */
+
+/********************************************************************
+ Name: append_Data2SDS() - Selects a data set by name, then writes data to it.
+ Description:
+ Uses the helper function get_SDSbyName to find and open the
+ data set, then calls SDwritedata to append data, and SDendaccess
+ to close it.
+ Return value:
+ Returns the size of the data that had been written successfully.
+ BMR - Dec 1, 2015
+*********************************************************************/
+int32 append_Data2SDS(int32 sd_id, char* sds_name, int32* start, int32* edges, void* ap_data)
+{
+ int32 sds_id, sds_index;
+ int32 sds_size, ntype;
+ int32 comp_size=0, uncomp_size=0;
+ char name[80];
+ intn status;
+ intn num_errs = 0; /* number of errors in compression test so far */
+
+
+ /* Find and select the data set */
+ sds_id = get_SDSbyName(sd_id, sds_name);
+ CHECK(sds_id, FAIL, "get_SDSbyName");
+
+ /* Get the current size of this dataset */
+ status = SDgetinfo(sds_id, name, NULL, NULL, &ntype, NULL);
+ CHECK(status, FAIL, "SDgetinfo");
+
+ status = SDgetdatasize(sds_id, &comp_size, &uncomp_size);
+ CHECK(status, FAIL, "SDgetdatasize");
+
+ /* Append data to it */
+ status = SDwritedata (sds_id, start, NULL, edges, (VOIDP)ap_data);
+ CHECK(status, FAIL, "SDwritedata");
+
+ /* Calculate data set's size to verify later */
+ sds_size = uncomp_size + edges[0] * edges[1] * edges[2] * DFKNTsize(ntype);
+
+ /* Terminate access to the data set */
+ status = SDendaccess (sds_id);
+ CHECK(status, FAIL, "SDendaccess");
+
+ /* Return the size of data being written */
+ return(sds_size);
+
+} /* append_Data2SDS */
+
+/********************************************************************
+ Name: verify_datasize() - Checks data size
+ Description:
+ Calls SDgetdatasize then verifies the data size against the
+ given data_size and reports if they are different.
+ Return value:
+ None.
+ BMR - Dec 1, 2015
+*********************************************************************/
+void verify_datasize(int32 sds_id, int32 data_size, char* sds_name)
+{
+ int32 comp_size=0, uncomp_size=0;
+ char msg[80];
+ intn status;
+ intn num_errs = 0; /* number of errors in compression test so far */
+
+ /* Get the size of data set's data */
+ status = SDgetdatasize(sds_id, &comp_size, &uncomp_size);
+ CHECK(status, FAIL, "SDgetdatasize");
+ sprintf(msg, "%s on data set %s\n", "SDgetdatasize", sds_name);
+ VERIFY(data_size, uncomp_size, msg);
+
+} /* verify_datasize */
+
+#endif /* HDF */
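The helpers added to tutils.c above (make_Ext3D_SDS, get_SDSbyName, verify_datasize) are shared by the external-file tests. Below is a minimal usage sketch, assuming their prototypes are visible (for example through hdftest.h); the names EXT_FILE and DS_NAME, the 2 x 3 x 4 buffer, and the helper helpers_sketch are illustrative placeholders, not values from the test suite.

    #include "mfhdf.h"
    #include "hdftest.h"

    #define EXT_FILE "ext_data.hdf"    /* hypothetical external data file name */
    #define DS_NAME  "Dataset1"        /* hypothetical data set name */

    static int32 sample_data[2][3][4]; /* small 2 x 3 x 4 sample buffer */

    /* Illustrative helper: create an external SDS, re-open it, check its size */
    static void helpers_sketch(int32 sd_id)
    {
        int32 dims[3] = {2, 3, 4};
        int32 sds_id, written_size;
        intn  status;
        intn  num_errs = 0;            /* kept for the CHECK macro, which counts failures */

        /* Create an SDS whose data is stored in EXT_FILE starting at offset 0 */
        written_size = make_Ext3D_SDS(sd_id, DS_NAME, DFNT_INT32, 3, dims,
                                      (VOIDP)sample_data, 0, EXT_FILE);

        /* Find the data set again by name and verify the recorded data size */
        sds_id = get_SDSbyName(sd_id, DS_NAME);
        verify_datasize(sds_id, written_size, DS_NAME);

        status = SDendaccess(sds_id);
        CHECK(status, FAIL, "SDendaccess");
    }
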
diff --git a/mfhdf/xdr/CMakeLists.txt b/mfhdf/xdr/CMakeLists.txt
index 20855ac..b6ccf9e 100644
--- a/mfhdf/xdr/CMakeLists.txt
+++ b/mfhdf/xdr/CMakeLists.txt
@@ -1,5 +1,5 @@
-cmake_minimum_required (VERSION 2.8.10)
-PROJECT (HDF4_MFHDF_XDR C CXX)
+cmake_minimum_required (VERSION 3.1)
+project (HDF4_MFHDF_XDR C CXX)
set (HDF4_MFHDF_XDR_SRCS
${HDF4_MFHDF_XDR_SOURCE_DIR}/xdr.c
@@ -16,35 +16,57 @@ set (HDF4_MFHDF_XDR_HDRS
INCLUDE_DIRECTORIES (${HDF4_HDFSOURCE_DIR})
INCLUDE_DIRECTORIES (${HDF4_MFHDF_XDR_DIR})
-add_definitions (-DHDF)
-
-if (WIN32)
- add_definitions (-DDOS_FS -DNO_SYS_XDR_INC)
-endif (WIN32)
-
+add_library (${HDF4_MF_XDR_LIB_TARGET} STATIC ${HDF4_MFHDF_XDR_SRCS} ${HDF4_MFHDF_XDR_HDRS})
if (MSVC OR MINGW)
- add_library (${HDF4_MF_XDR_LIB_TARGET} ${LIB_TYPE} ${HDF4_MFHDF_XDR_SRCS} ${HDF4_MFHDF_XDR_HDRS})
target_link_libraries (${HDF4_MF_XDR_LIB_TARGET} "ws2_32.lib")
-else (MSVC OR MINGW)
- add_library (${HDF4_MF_XDR_LIB_TARGET} ${LIB_TYPE} ${HDF4_MFHDF_XDR_SRCS} ${HDF4_MFHDF_XDR_HDRS} )
endif (MSVC OR MINGW)
-TARGET_C_PROPERTIES (${HDF4_MF_XDR_LIB_TARGET} " " " ")
+TARGET_C_PROPERTIES (${HDF4_MF_XDR_LIB_TARGET} STATIC " " " ")
set_global_variable (HDF4_LIBRARIES_TO_EXPORT "${HDF4_LIBRARIES_TO_EXPORT};${HDF4_MF_XDR_LIB_TARGET}")
-H4_SET_LIB_OPTIONS (${HDF4_MF_XDR_LIB_TARGET} ${HDF4_MF_XDR_LIB_NAME} ${LIB_TYPE})
+H4_SET_LIB_OPTIONS (${HDF4_MF_XDR_LIB_TARGET} ${HDF4_MF_XDR_LIB_NAME} STATIC)
+set_target_properties (${HDF4_MF_XDR_LIB_TARGET} PROPERTIES
+ FOLDER libraries
+ COMPILE_DEFINITIONS "HDF"
+ INTERFACE_INCLUDE_DIRECTORIES "$<INSTALL_INTERFACE:$<INSTALL_PREFIX>/include>"
+)
+if (WIN32)
+ set_property (TARGET ${HDF4_MF_XDR_LIB_TARGET} APPEND PROPERTY COMPILE_DEFINITIONS "DOS_FS;NO_SYS_XDR_INC")
+endif (WIN32)
+set (install_targets ${HDF4_MF_XDR_LIB_TARGET})
+
+if (BUILD_SHARED_LIBS)
+ add_library (${HDF4_MF_XDR_LIBSH_TARGET} SHARED ${HDF4_MFHDF_XDR_SRCS} ${HDF4_MFHDF_XDR_HDRS})
+ if (MSVC OR MINGW)
+ target_link_libraries (${HDF4_MF_XDR_LIBSH_TARGET} "ws2_32.lib")
+ endif (MSVC OR MINGW)
+ TARGET_C_PROPERTIES (${HDF4_MF_XDR_LIBSH_TARGET} SHARED " " " ")
+ set_global_variable (HDF4_LIBRARIES_TO_EXPORT "${HDF4_LIBRARIES_TO_EXPORT};${HDF4_MF_XDR_LIBSH_TARGET}")
+ H4_SET_LIB_OPTIONS (${HDF4_MF_XDR_LIBSH_TARGET} ${HDF4_MF_XDR_LIB_NAME} SHARED)
+ set_target_properties (${HDF4_MF_XDR_LIBSH_TARGET} PROPERTIES
+ FOLDER libraries
+ COMPILE_DEFINITIONS "HDF;H4_BUILT_AS_DYNAMIC_LIB"
+ INTERFACE_INCLUDE_DIRECTORIES "$<INSTALL_INTERFACE:$<INSTALL_PREFIX>/include>"
+ INTERFACE_COMPILE_DEFINITIONS H4_BUILT_AS_DYNAMIC_LIB=1
+ )
+ if (WIN32)
+ set_property (TARGET ${HDF4_MF_XDR_LIBSH_TARGET} APPEND PROPERTY COMPILE_DEFINITIONS "DOS_FS;NO_SYS_XDR_INC")
+ endif (WIN32)
+ set (install_targets ${install_targets} ${HDF4_MF_XDR_LIBSH_TARGET})
+endif (BUILD_SHARED_LIBS)
#-----------------------------------------------------------------------------
# Add library to CMake Install : Installs lib and cmake config info
#-----------------------------------------------------------------------------
if (BUILD_SHARED_LIBS)
- INSTALL_TARGET_PDB (${HDF4_MF_XDR_LIB_TARGET} ${HDF4_INSTALL_LIB_DIR} libraries)
+ INSTALL_TARGET_PDB (${HDF4_MF_XDR_LIBSH_TARGET} ${HDF4_INSTALL_BIN_DIR} libraries)
endif (BUILD_SHARED_LIBS)
-
+
INSTALL (
- TARGETS
- ${HDF4_MF_XDR_LIB_TARGET}
- EXPORT
+ TARGETS
+ ${install_targets}
+ EXPORT
${HDF4_EXPORTED_TARGETS}
LIBRARY DESTINATION ${HDF4_INSTALL_LIB_DIR} COMPONENT libraries
ARCHIVE DESTINATION ${HDF4_INSTALL_LIB_DIR} COMPONENT libraries
RUNTIME DESTINATION ${HDF4_INSTALL_BIN_DIR} COMPONENT libraries
+ FRAMEWORK DESTINATION ${HDF4_INSTALL_FWRK_DIR} COMPONENT libraries
)
diff --git a/mfhdf/xdr/Makefile.in b/mfhdf/xdr/Makefile.in
index 2385193..eaa8e8f 100644
--- a/mfhdf/xdr/Makefile.in
+++ b/mfhdf/xdr/Makefile.in
@@ -92,7 +92,20 @@ check_PROGRAMS = xdrtest$(EXEEXT)
TESTS = $(am__EXEEXT_1)
subdir = mfhdf/xdr
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
-am__aclocal_m4_deps = $(top_srcdir)/configure.ac
+am__aclocal_m4_deps = $(top_srcdir)/m4/ax_check_class.m4 \
+ $(top_srcdir)/m4/ax_check_classpath.m4 \
+ $(top_srcdir)/m4/ax_check_java_home.m4 \
+ $(top_srcdir)/m4/ax_check_junit.m4 \
+ $(top_srcdir)/m4/ax_java_options.m4 \
+ $(top_srcdir)/m4/ax_jni_include_dir.m4 \
+ $(top_srcdir)/m4/ax_prog_jar.m4 \
+ $(top_srcdir)/m4/ax_prog_java.m4 \
+ $(top_srcdir)/m4/ax_prog_java_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javac.m4 \
+ $(top_srcdir)/m4/ax_prog_javac_works.m4 \
+ $(top_srcdir)/m4/ax_prog_javadoc.m4 \
+ $(top_srcdir)/m4/ax_try_compile_java.m4 \
+ $(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
mkinstalldirs = $(SHELL) $(top_srcdir)/bin/mkinstalldirs
@@ -418,12 +431,27 @@ FFLAGS = @FFLAGS@
FGREP = @FGREP@
FLIBS = @FLIBS@
GREP = @GREP@
+H4_CLASSPATH = @H4_CLASSPATH@
+H4_JAVACFLAGS = @H4_JAVACFLAGS@
+H4_JAVAFLAGS = @H4_JAVAFLAGS@
H4_VERSION = @H4_VERSION@
+HDF_JAVA = @HDF_JAVA@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
INSTALL_PROGRAM = @INSTALL_PROGRAM@
INSTALL_SCRIPT = @INSTALL_SCRIPT@
INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+JAR = @JAR@
+JAVA = @JAVA@
+JAVAC = @JAVAC@
+JAVACFLAGS = @JAVACFLAGS@
+JAVADOC = @JAVADOC@
+JAVAFLAGS = @JAVAFLAGS@
+JAVAPREFIX = @JAVAPREFIX@
+JAVA_JUNIT = @JAVA_JUNIT@
+JAVA_PATH_NAME = @JAVA_PATH_NAME@
+JNIFLAGS = @JNIFLAGS@
+JUNIT = @JUNIT@
LD = @LD@
LDFLAGS = @LDFLAGS@
LEX = @LEX@
@@ -466,11 +494,14 @@ STRIP = @STRIP@
SZIP_HAS_ENCODER = @SZIP_HAS_ENCODER@
SZIP_INFO = @SZIP_INFO@
TBL = @TBL@
+TESTS_JUNIT = @TESTS_JUNIT@
TEST_FORTRAN_NETCDF = @TEST_FORTRAN_NETCDF@
UNAME_INFO = @UNAME_INFO@
USE_COMP_SZIP = @USE_COMP_SZIP@
+UUDECODE = @UUDECODE@
VERSION = @VERSION@
YACC = @YACC@
+_ACJNI_JAVAC = @_ACJNI_JAVAC@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/release_notes/HISTORY.txt b/release_notes/HISTORY.txt
index b0ae8cd..293ef59 100644
--- a/release_notes/HISTORY.txt
+++ b/release_notes/HISTORY.txt
@@ -16,6 +16,7 @@
List of the HDF4 releases
+4.2.11 February 2015
4.2.10 February 2014
4.2.9 February 2013
4.2.8 August 2012
@@ -43,6 +44,357 @@ List of the HDF4 releases
==========================================================================
+%%%4.2.11%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+HDF version 4.2.11 released on 2015-02-09
+==============================================
+
+INTRODUCTION
+
+This document describes the differences between HDF 4.2.10 and HDF 4.2.11.
+It is written for people who are familiar with previous releases of HDF
+and wish to migrate to HDF 4.2.11.
+
+The HDF 4.2.11 documentation can be found on The HDF Group's website
+at:
+ http://www.hdfgroup.org/release4/doc/
+
+First-time HDF users are encouraged to read the HDF FAQ, which can be
+reached from the HDF product home page:
+
+ http://hdfgroup.org/products/hdf4/
+
+If you have any questions or comments, please see the HDF Support page:
+
+ http://hdfgroup.org/services/support.html
+
+CONTENTS
+
+- New features and changes
+ -- Configuration
+- Support for new platforms and compilers
+- Bugs fixed since HDF 4.2.10
+ -- Configuration
+ -- Library
+ -- Utilities
+- Documentation
+- Platforms tested
+- Known problems
+
+
+New features and changes
+========================
+ Configuration
+ =============
+ - None
+
+
+Support for new platforms and compilers
+=======================================
+ - None
+
+
+Bugs fixed since HDF 4.2.10
+=========================
+ Configuration
+ =============
+ - Windows installer incorrect display of PATH environment variable.
+
+ In the Windows installer, the dialog box where the user can elect to
+ add the product's bin path to the %PATH% environment variable displayed
+ an incorrect path. This path was missing the C:\Program Files part
+ and used the POSIX file separator '/' before the bin (<path>/bin,
+ instead of <path>\bin).
+
+ The dialog box text was changed to simply say that the product's bin
+ path would be added instead of explicitly displaying the path.
+ This is in line with most installers. The reason for not fixing the
+ displayed path instead is that it is difficult to pass the correct
+ path from CPack to the NSIS installer for display.
+
+ Note that this was never a code issue - it was just a display
+ problem. The installer always did the right thing when updating the
+ environment variable.
+
+ (DER - 2014/11/14, HDFFV-9016)
+
+ Library
+ =========
+ - Warning "array subscript is below array bounds"
+
+ Applied user's patch to remove the warning.
+
+ (BMR 2014/06/02, HDFFR-1379)
+
+
+ Utilities
+ =========
+ - Detection of read failure in ncdump
+
+ Previously, ncdump did not detect failure from ncvarget because the
+ returned value from ncvarget was not checked, and the calling function
+ simply returned 0.
+
+ The error code ERR_READFAIL (-2) is added to ncdump only to indicate this
+ failure within ncdump, which will display this message:
+
+ "Reading failed for variable <Variable name>, the data is possibly corrupted."
+
+ (BMR 2015/01/21, HDFFR-1468)
+
+ - Improvement of the ncgen's usage statement.
+
+ Previously, ncgen's usage looked like this:
+ ncgen: -: Usage: ncgen [-V] [ -b ] [ -c ] [ -f ] [ -o outfile] [ file... ]
+
+ More details are added to the usage to improve clarity. Now it is
+ clearer, and consistent with ncdump, like this:
+ Usage: ncgen [-V] [ -b ] [ -c ] [ -f ] [ -o outfile] [ file ... ]
+ [-V] Display version of the HDF4 library and exit
+ [-b] For binary netCDF output, '.nc' extension
+ [-c] For C output
+ [-f] For Fortran output
+ [-o <outfile>] Explicitly specify output file name
+
+ (BMR 2015/01/19, HDFFR-1459)
+
+ - Output of hrepack containing an unnecessary vgroup of class RIG0.0
+
+ When the input file did not have any GR elements, hrepack still opened and
+ closed the output file using the GR API, which caused the RIG0.0 vgroup to
+ be written to the output file.
+
+ Hrepack now skips accessing the output file with the GR API when the input
+ file doesn't have any images or GR attributes.
+
+ (BMR 2015/01/18, HDFFR-1428)
+
+ - Compliance with Fedora standard regarding printf/fprintf statements
+
+ Users sent patches for the problem where the format string was missing from
+ printf/fprintf statements. The fix brings the code into compliance with the
+ Fedora standard.
+ For more information, see
+ https://fedoraproject.org/wiki/Format-Security-FAQ.
+
+ In the context where this problem occurred, the benefit of using puts/fputs
+ over printf/fprintf is insignificant. Thus, the fix was adding "%s" to
+ those printf/fprintf statements that don't have the format string instead
+ of switching to puts/fputs.
+
+ (BMR 2014/12/16, HDFFR-1423 and HDFFR-1475)
+
+ - Failure of hdp on some hdfeos generated files
+
+ Attribute vdatas created by hdfeos API have the field named "AttrValues".
+ The utility functions Vattrhdfsize and VSattrhdfsize, in hdp.c, used
+ ATTR_FIELD_NAME ("VALUES") to verify that a vdata is storing an attribute,
+ causing failure on some hdfeos generated files. In addition, when this
+ failure occurred, the calling function tried to free allocated resources
+ prematurely.
+
+ The check against ATTR_FIELD_NAME and the premature resource deallocation
+ are removed.
+
+ (BMR 2014/12/08, HDFFR-1471)
+
+ - nclong versus long in tests
+
+ Applied the user's patch to remove a test failure.
+
+ (BMR 2014/10/21, HDFFR-1378)
+
+
+Documentation
+=============
+ - Updated Reference Manual and User's Guide
+
+ The documents were updated to contain information about the changes to
+ the tools. In addition, various improvements were applied.
+
+ (BMR 2015/2/04)
+
+
+
+Platforms tested
+================
+
+This version has been tested on the following platforms:
+
+ Linux 2.6.32-358.18.1 gcc (GCC) 4.4.7 20120313 (Red Hat 4.4.7-11)
+ .el6.ppc64 #1 GNU Fortran (GCC) 4.4.7 20120313 (Red Hat 4.4.7-11)
+ SMP ppc64 GNU/Linux IBM XL Fortran for Linux, V15.1 (64-bit mode)
+ (ostrich)
+
+ Linux 2.6.18-308.13.1.el5 #1 gcc (GCC) 4.1.2 20080704 (Red Hat 4.1.2-55)
+ SMP i686 i386 GNU Fortran (GCC) 4.1.2 20080704
+ (jam) (Red Hat 4.1.2-55)
+ pgcc and pgf90 14.10-0 32-bit target
+ on x86 Linux -tp penryn
+ Intel(R) C Compiler, Version 15.0.1 20141022
+ Intel(R) Fortran Compiler, Version 15.0.1
+
+ Linux 2.6.18-398.el5 #1 gcc (GCC) 4.1.2 20080704 (Red Hat 4.1.2-55)
+ SMP x86_64 GNU/Linux GNU Fortran (GCC) 4.1.2 20080704
+ (koala) (Red Hat 4.1.2-55)
+ icc (ICC) 15.0.1 20141022
+ ifort (IFORT) 15.0.1 20141022
+
+ Linux 2.6.32-504.1.3.el6 gcc (GCC) 4.4.7 20120313 (Red Hat 4.4.7-11)
+ #1 SMP x86_64 GNU/Linux GNU Fortran (GCC) 4.4.7 20120313
+ (platypus) (Red Hat 4.4.7-11)
+ icc (ICC) 15.0.1 20141022
+ ifort (IFORT) 15.0.1 20141022
+ pgcc and pgf90 14.10-0 64-bit target
+ on x86-64 Linux -tp nehalem
+
+ Linux 3.10.0-123.8.1.el7 gcc (GCC) 4.8.2 20140120 (Red Hat 4.8.2-16)
+ #1 SMP x86_64 GNU/Linux GNU Fortran (GCC) 4.8.2 20140120
+ (aws ec2 CentOS 7 image) (Red Hat 4.8.2-16)
+
+ SunOS 5.11 32- and 64-bit Sun C 5.12 SunOS_sparc 2011/11/16
+ (emu) (see "Known problem" section)
+ Sun Fortran 95 8.6 SunOS_sparc 2011/11/16
+
+ Windows 7 Visual Studio 2008 (cmake)
+ Visual Studio 2010 w/ Intel Fortran 14 (cmake)
+ Visual Studio 2012 w/ Intel Fortran 14 (cmake)
+ Visual Studio 2013 w/ Intel Fortran 14 (cmake)
+ Cygwin(CYGWIN_NT-6.1 1.7.32(0.274/5/3) gcc(4.8.3) compiler and gfortran)
+ (cmake and autotools)
+
+ Windows 7 x64 Visual Studio 2008 (cmake)
+ Visual Studio 2010 w/ Intel Fortran 14 (cmake)
+ Visual Studio 2012 w/ Intel Fortran 14 (cmake)
+ Visual Studio 2013 w/ Intel Fortran 14 (cmake)
+
+ Windows 8.1 Visual Studio 2012 w/ Intel Fortran 14 (cmake)
+ Visual Studio 2013 w/ Intel Fortran 14 (cmake)
+
+ Windows 8.1 x64 Visual Studio 2012 w/ Intel Fortran 14 (cmake)
+ Visual Studio 2013 w/ Intel Fortran 14 (cmake)
+
+ Mac OS X 10.7.5 Apple clang version 3.0 from Xcode 4.6.1
+ Darwin 11.4.2 gfortran GNU Fortran (GCC) 4.8.2
+ (duck) icc and ifort Version 13.0.3 20130606
+
+ Mac OS X 10.8.5 Apple clang version 5.1 from Xcode 5.1
+ Darwin 12.5.0 gfortran GNU Fortran (GCC) 4.8.2
+ (swallow,kite) icc and ifort Version 14.0.4 20140805
+
+ Mac OS X 10.9.5 Apple clang version 6.0 from Xcode 6.0.1
+ Darwin 13.4.0 gfortran GNU Fortran (GCC) 4.8.2
+ (wren,quail) icc and ifort Version 15.0.1 20141022
+
+ Debian7.5.0 3.2.0-4-amd64 #1 SMP Debian 3.2.51-1 x86_64 GNU/Linux
+ gcc (Debian 4.7.2-5) 4.7.2
+ GNU Fortran (Debian 4.7.2-5) 4.7.2
+ (cmake and autotools)
+
+ Fedora20 3.15.3-200.fc20.x86_64 #1 SMP x86_64 x86_64 x86_64 GNU/Linux
+ gcc (GCC) 4.8.3 20140624 (Red Hat 4.8.3-1)
+ GNU Fortran (GCC) 4.8.3 20140624 (Red Hat 4.8.3-1)
+ (cmake and autotools)
+
+ SUSE 13.1 3.11.10-17-desktop #1 SMP PREEMPT x86_64 x86_64 x86_64 GNU/Linux
+ gcc (SUSE Linux) 4.8.1
+ GNU Fortran (SUSE Linux) 4.8.1
+ (cmake and autotools)
+
+ Ubuntu 14.04 3.13.0-35-generic #62-Ubuntu SMP x86_64 GNU/Linux
+ gcc (Ubuntu/Linaro 4.9.1-0ubuntu1) 4.9.1
+ GNU Fortran (Ubuntu/Linaro 4.9.1-0ubuntu1) 4.9.1
+ (cmake and autotools)
+
+
+Known problems
+==============
+o Several Fortran examples print "^@" when displaying strings (for example,
+  names of the attributes). This happens because the Fortran application
+  does not know the length of the strings passed from the C library.
+ EIP - 2015-01-11, HDFFR-1477
+
+o CMake builds on Windows use the same pre-generated ncgen*.[ch] files from
+  the yacc/lex input files. The generated file, ncgenyy.c, uses the <unistd.h>
+  header file, which Windows does not support, so that include must be blocked
+  out in order for Windows to use it.
+ AKC 2014-02-03, HDFFR-1424
+
+o CMake "make install" fails installing the tools:
+ Use CPack to create an install package.
+ ADB - 2014/02/03
+
+o CMake does not install these man pages:
+ hdf.1, ncdump.1, ncgen.1
+ AKC/BMR - 2014/02/02
+
+o For Mac OS X 10.7 Lion and 10.8 Mountain Lion, several tests fail with
+ GCC, Intel and Clang compilers. Currently, this situation is detected and
+ -O0 level optimization is used.
+ (HDFFR-1318,1358) EIP - 2013/02/05
+
+o On IBM PowerPC 64, hdftest fails when gcc 4.4.6 is used with -O3 optimization
+ level.
+
+o When building in AIX systems, if CC is xlc with -qlanglvl=ansi, configure
+ will fail when checking for the jpeglib.h header due to the duplicated
+ macro definition of HAVE_STDLIB_H. This is because some newer builds
+ of the jpeg library have HAVE_STDLIB_H defined in the jconfig.h header file.
+ Without the -qlanglvl=ansi, some older xlc versions (e.g., V7.0) still
+ fail, but newer xlc versions (e.g., V9.0) pass. AKC - 2010/02/17
+
+o When building on Linux/UNIX platforms, the szip shared library files must
+ be in the system library path. This can be done by adding a link to
+ the libsz.* files in the /usr/lib folder or by adding the library
+ location to the LD_LIBRARY_PATH environment variable.
+ Ex. export LD_LIBRARY_PATH=path_to_szip_lib:$LD_LIBRARY_PATH
+ Optionally, one can use the static szip library files by adding '-static'
+ to the CFLAGS environment variable.
+
+o Existing data written by an HDF4 Library prior to HDF 4.2r2:
+ When a one-dimensional SDS and a dimension scale have
+ the same name, subsequent accesses to the dimension scale or to the
+ SDS might produce undesired results because the libraries could not
+ distinguish between the two objects. In the case of writing, data
+ might even be corrupted. For example, SDS data might be written to a
+ dimension variable or vice versa. (bugzilla #624)
+
+ HDF4 Library Releases 4.2r2 and later make a distinction between an SDS
+ and a dimension variable. However, as with older versions, these recent
+ versions are unable to detect such conflicts in files created by earlier
+ releases. It is therefore STRONGLY recommended to check for such name
+ duplication before working with data created with a pre-4.2r2 library.
+
+ The functions SDgetnumvars_byname and SDnametoindices are provided
+ to help detect such name conflicts and select the correct object to
+ access, respectively; see the HDF Reference Manual entries for
+ further details.
+ FB - 2009/01/26
+ BMR - revised 2011/06/24
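+
+  As an illustrative sketch only (this fragment is not part of the original
+  note), the following C code shows one way such a check might look; the
+  function name check_name and the file and object names passed to it are
+  hypothetical:
+
+      #include <stdio.h>
+      #include "mfhdf.h"
+
+      /* Return the number of objects sharing 'name', or -1 on error. */
+      static int check_name(const char *file, const char *name)
+      {
+          int32 sd_id, n_vars = 0;
+
+          sd_id = SDstart(file, DFACC_READ);
+          if (sd_id == FAIL)
+              return -1;
+
+          /* Count the SDSs and dimension scales that share this name. */
+          if (SDgetnumvars_byname(sd_id, (char *)name, &n_vars) == FAIL) {
+              SDend(sd_id);
+              return -1;
+          }
+
+          if (n_vars > 1)
+              /* Ambiguous name; use SDnametoindices to pick the object. */
+              printf("Warning: %ld objects are named \"%s\"\n",
+                     (long)n_vars, name);
+
+          SDend(sd_id);
+          return (int)n_vars;
+      }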
+
+o N-bit compression is not supported with Fortran APIs.
+
+o Using both fill-value and compression on SD datasets does not work.
+
+o When using PGI compilers, make sure that the JPEG library is also compiled
+ with a PGI C compiler; linking with a JPEG library built with gcc causes
+ JPEG library tests to fail. To bypass the problem:
+
+ x Set LIBS flag to $PGI_JPEG_INSTALL_DIR/lib/libjpeg.a
+ where $PGI_JPEG_INSTALL_DIR points to the installation directory
+ for the PGI-compiled JPEG library:
+
+ setenv LIBS $PGI_JPEG_INSTALL_DIR/lib/libjpeg.a
+
+ x Use the --with-jpeg=$PGI_JPEG_INSTALL_DIR configure flag to
+ configure with the PGI-compiled JPEG library:
+
+ ./configure --with-jpeg=$PGI_JPEG_INSTALL_DIR --with-zlib....
+
+o In order for the API SDgetdatasize to get the correct compressed size
+ of the data, the dataset needs to be closed (SDendaccess) or read
+ (SDreaddata) after being written and before SDgetdatasize is called.
+ BMR - 2008/11/22
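+
+  A minimal C sketch of that call order (illustrative only; the file name,
+  dataset name, sizes, and deflate level below are arbitrary):
+
+      #include "mfhdf.h"
+
+      static void show_sizes(void)
+      {
+          int32 sd_id, sds_id, dims[1] = {100};
+          int32 start[1] = {0}, edges[1] = {100};
+          int32 comp_size = 0, uncomp_size = 0;
+          int32 data[100] = {0};
+          comp_info c_info;
+
+          sd_id  = SDstart("example.hdf", DFACC_CREATE);
+          sds_id = SDcreate(sd_id, "data1", DFNT_INT32, 1, dims);
+
+          c_info.deflate.level = 6;
+          SDsetcompress(sds_id, COMP_CODE_DEFLATE, &c_info);
+          SDwritedata(sds_id, start, NULL, edges, (VOIDP)data);
+
+          /* Close the data set so the compressed data is flushed ... */
+          SDendaccess(sds_id);
+
+          /* ... then reopen it before asking for the sizes. */
+          sds_id = SDselect(sd_id, SDnametoindex(sd_id, "data1"));
+          SDgetdatasize(sds_id, &comp_size, &uncomp_size);
+
+          SDendaccess(sds_id);
+          SDend(sd_id);
+      }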
+
+
%%%4.2.10%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
HDF version 4.2.10 released on 2014-02-09
==============================================
diff --git a/release_notes/INSTALL b/release_notes/INSTALL
index a55be5e..b5045db 100644
--- a/release_notes/INSTALL
+++ b/release_notes/INSTALL
@@ -33,10 +33,10 @@ The HDF Group's FTP server at ftp://ftp.hdfgroup.org/lib-external/zlib/.
=================================
HDF4 may be configured to use the Szip compression Library. For more
-information about the Szip library, see http://hdfgroup.org/doc_resource/SZIP/.
+information about the Szip library, see https://hdfgroup.org/doc_resource/SZIP/.
The Szip compression library is free for non-commercial use; see
-http://hdfgroup.org/doc_resource/SZIP/Commercial_szip.html for information
+https://hdfgroup.org/doc_resource/SZIP/Commercial_szip.html for information
regarding commercial use.
diff --git a/release_notes/INSTALL_CMake.txt b/release_notes/INSTALL_CMake.txt
index 132038e..9b12853 100644
--- a/release_notes/INSTALL_CMake.txt
+++ b/release_notes/INSTALL_CMake.txt
@@ -3,11 +3,11 @@
*************************************************************************
Table of Contents
-
+
Section I: Preconditions
-Section II: Quick Step Building HDF4 Libraries with CMake Script Mode
+Section II: Quick Step Building HDF4 Libraries with CMake Script Mode
Section III: Quick Step Building HDF4 Libraries with CMake Command Mode
-Section IV: Further considerations
+Section IV: Further considerations
Section V: Options for building HDF4 Libraries with CMake command line
Section VI: CMake option defaults for HDF4
Section VII: User Defined Options for HDF4 Libraries with CMake
@@ -19,31 +19,35 @@ Section VIII: Options for platform configuration files
========================================================================
I. Preconditions
========================================================================
-Obtaining HDF4 source code
+Obtaining HDF4 source code
1. Create a directory for your development; for example, "myhdfstuff".
2. Obtain compressed (*.tar or *.zip) HDF4 source from
- http://www.hdfgroup.org/ftp/HDF/HDF_Current/src/
+ https://www.hdfgroup.org/ftp/HDF/HDF_Current/src/
and put it in "myhdfstuff".
Do not uncompress.
CMake version
- 1. We suggest you obtain the latest CMake from the Kitware web site.
- The HDF 4.2."X" product requires a minimum CMake version 2.8.12,
- where "X" is the current HDF4 release version.
+ 1. We suggest you obtain the latest CMake from the Kitware web site.
+ The HDF 4.2."X" product requires a minimum CMake version 3.1.0,
+ where "X" is the current HDF4 release version.
+
+Note:
+ To change the install prefix from the platform defaults initialize
+ the CMake variable, CMAKE_INSTALL_PREFIX.
========================================================================
II. Quick Step Building HDF4 Libraries with CMake Script Mode
========================================================================
-This short set of instructions is written for users who want to quickly
+This short set of instructions is written for users who want to quickly
build the HDF4 C, C++, and Fortran shared libraries and tools
-from the HDF4 source code package using the CMake tools. This procedure
+from the HDF4 source code package using the CMake tools. This procedure
will use the default settings in the config/cmake/cacheinit.cmake file.
HDF Group recommends using the ctest script mode to build HDF4.
The following files referenced below are available at the HDF web site:
- http://www.hdfgroup.org/release4/cmakebuild.html
+ https://www.hdfgroup.org/release4/cmakebuild.html
CMake build script:
CTestScript.cmake
@@ -52,7 +56,7 @@ External compression szip and zlib libraries:
JPEG8b.tar.gz
SZip.tar.gz
ZLib.tar.gz
-
+
Platform configuration files:
HDF4Windows64CMake.cmake
HDF4Windows32CMake.cmake
@@ -62,41 +66,41 @@ Platform configuration files:
To build HDF4 with the SZIP, ZLIB and JPEG external libraries you will need to:
1. Change to the development directory "myhdfstuff".
-
+
2. Download the SZip.tar.gz, ZLib.tar.gz, and JPEG.tar.gz to "myhdfstuff".
Do not uncompress the files.
-
+
3. Download the CTestScript.cmake file to "myhdfstuff".
- CTestScript.cmake file should not be modified.
-
+ CTestScript.cmake file should not be modified.
+
4. Download a platform configuration file to "myhdfstuff".
- Do not modify the file unless you want to change default build
+ Do not modify the file unless you want to change default build
environment.
- 5. From the "myhdfstuff" directory execute the CTest Script with the
+ 5. From the "myhdfstuff" directory execute the CTest Script with the
following options:
ctest -S <configuration file> -C Release -VV -O hdf4.log
- The command above will configure, build, test, and create an install
+ The command above will configure, build, test, and create an install
package in the myhdfstuff/hdf-4.2/build folder.
The -S option uses the script version of ctest.
-
- The value for the -C option (as shown above, "-C Release") must
- match the setting for CTEST_BUILD_CONFIGURATION in the platform
+
+ The value for the -C option (as shown above, "-C Release") must
+ match the setting for CTEST_BUILD_CONFIGURATION in the platform
configuration file.
-
+
The -VV option is for verbose; use -V for less verbose.
-
+
The "-O hdf4.log" option saves the output to a log file hdf5.log.
6. To install, "X" is the current release version
-
+
On Windows, execute:
HDF-4.2."X"-win32.exe or HDF-4.2."X"-win64.exe
By default this program will install the hdf4 library into the
- "C:\Program Files" directory and will create the following
+ "C:\Program Files" directory and will create the following
directory structure:
HDF_Group
--HDF
@@ -106,14 +110,14 @@ To build HDF4 with the SZIP, ZLIB and JPEG external libraries you will need to:
------lib
------cmake
- On Linux, change to the install destination directory
+ On Linux, change to the install destination directory
      (create it if it doesn't exist) and execute:
<path-to>/myhdfstuff/hdf-4.2/build/HDF-4.2."X"-Linux.sh
After accepting the license, the script will prompt:
By default the HDF4 will be installed in:
"<current directory>/HDF-4.2."X"-Linux"
        Do you want to include the subdirectory HDF-4.2."X"-Linux?
- Saying no will install in: "<current directory>" [Yn]:
+ Saying no will install in: "<current directory>" [Yn]:
Note that the script will create the following directory structure
relative to the install point:
HDF_Group
@@ -123,10 +127,10 @@ To build HDF4 with the SZIP, ZLIB and JPEG external libraries you will need to:
------include
------lib
------share
-
+
On Mac you will find HDF-4.2."X"-Darwin.dmg in the build folder. Click
on the dmg file to proceed with installation. After accepting the license,
- there will be a folder with the following structure:
+ there will be a folder with the following structure:
HDF_Group
--HDF
----4.2."X"
@@ -134,7 +138,7 @@ To build HDF4 with the SZIP, ZLIB and JPEG external libraries you will need to:
------include
------lib
------share
-
+
By default the installation will create the bin, include, lib and cmake
folders in the <install destination directory>/HDF_Group/HDF/4.2.
@@ -142,20 +146,20 @@ To build HDF4 with the SZIP, ZLIB and JPEG external libraries you will need to:
========================================================================
III. Quick Step Building HDF4 C Static Libraries and Tools with CMake
========================================================================
-Notes: This short set of instructions is written for users who want to
- quickly build the just the HDF4 C static library and tools from
+Notes: This short set of instructions is written for users who want to
+       quickly build just the HDF4 C static library and tools from
the HDF4 source code package using the CMake command line tools.
-
+
Go through these steps:
1. Change to the development directory "myhdfstuff".
2. Uncompress the HDF4 source file
-
+
3. Create a folder "build" in the "myhdfstuff" directory.
-
- 4. Change into the "build" folder.
-
+
+ 4. Change into the "build" folder.
+
5. Configure the C library, tools and tests with one of the following commands:
On Windows 32 bit
@@ -163,26 +167,26 @@ Notes: This short set of instructions is written for users who want to
On Windows 64 bit
cmake -G "Visual Studio 11 Win64" -DCMAKE_BUILD_TYPE:STRING=Release -DBUILD_TESTING:BOOL=ON -DHDF4_BUILD_TOOLS:BOOL=ON ..\hdf-4.2."X"
-
+
On Linux and Mac
cmake -G "Unix Makefiles" -DCMAKE_BUILD_TYPE:STRING=Release -DBUILD_TESTING:BOOL=ON -DHDF4_BUILD_TOOLS:BOOL=ON ../hdf-4.2."X"
where "X" is the current release version.
-
+
6. Build the C library, tools and tests with this command:
cmake --build . --config Release
-
+
7. Test the C library and tools with this command:
ctest . -C Release
-
+
8. Create an install image with this command:
cpack -C Release CPackConfig.cmake
-
- 9. To install
+
+ 9. To install
On Windows, execute:
HDF-4.2."X"-win32.exe or HDF-4.2."X"-win64.exe
By default this program will install the hdf4 library into the
- "C:\Program Files" directory and will create the following
+ "C:\Program Files" directory and will create the following
directory structure:
HDF_Group
--HDF
@@ -192,14 +196,14 @@ Notes: This short set of instructions is written for users who want to
------lib
------cmake
- On Linux, change to the install destination directory
+ On Linux, change to the install destination directory
      (create it if it doesn't exist) and execute:
<path-to>/myhdfstuff/build/HDF-4.2."X"-Linux.sh
After accepting the license, the script will prompt:
By default the HDF4 will be installed in:
"<current directory>/HDF-4.2."X"-Linux"
Do you want to include the subdirectory HDF-4.2."X"-Linux?
- Saying no will install in: "<current directory>" [Yn]:
+ Saying no will install in: "<current directory>" [Yn]:
Note that the script will create the following directory structure
relative to the install point:
HDF_Group
@@ -209,10 +213,10 @@ Notes: This short set of instructions is written for users who want to
------include
------lib
------share
-
+
On Mac you will find HDF-4.2."X"-Darwin.dmg in the build folder. Click
on the dmg file to proceed with installation. After accepting the license,
- there will be a folder with the following structure:
+ there will be a folder with the following structure:
HDF_Group
--HDF
----4.2."X"
@@ -223,15 +227,15 @@ Notes: This short set of instructions is written for users who want to
========================================================================
-IV. Further considerations
+IV. Further considerations
========================================================================
1. We suggest you obtain the latest CMake for windows from the Kitware
- web site. The HDF 4.2."X" product requires a minimum CMake version 2.8.12.
-
+ web site. The HDF 4.2."X" product requires a minimum CMake version 3.1.0.
+
2. HDF4 requires Zlib and JPEG. Szip is optional:
- A. Download the binary packages and install them in a central location.
- For example on Windows, create a folder extlibs and install the
+ A. Download the binary packages and install them in a central location.
+ For example on Windows, create a folder extlibs and install the
packages there. Add the following CMake options:
-DJPEG_LIBRARY:FILEPATH=some_location/lib/jpeg.lib
-DJPEG_INCLUDE_DIR:PATH=some_location/include
@@ -239,31 +243,31 @@ IV. Further considerations
-DZLIB_INCLUDE_DIR:PATH=some_location/include
-DSZIP_LIBRARY:FILEPATH=some_location/lib/szlib.lib
-DSZIP_INCLUDE_DIR:PATH=some_location/include
- where "some_location" is the full path to the extlibs folder.
+ where "some_location" is the full path to the extlibs folder.
Note that you can use either JPEG 6b or 8b.
- B. Use source packages from an SVN server by adding the following CMake
+ B. Use source packages from an SVN server by adding the following CMake
options:
-
+
HDF4_ALLOW_EXTERNAL_SUPPORT:STRING="SVN"
- JPEG_SVN_URL:STRING="http://some_location/jpeg/trunk"
- ZLIB_SVN_URL:STRING="http://some_location/zlib/trunk"
- SZIP_SVN_URL:STRING="http://some_location/szip/trunk"
+ JPEG_SVN_URL:STRING="https://some_location/jpeg/trunk"
+ ZLIB_SVN_URL:STRING="https://some_location/zlib/trunk"
+ SZIP_SVN_URL:STRING="https://some_location/szip/trunk"
where "some_location" is the URL to the SVN repository. Also set
CMAKE_BUILD_TYPE to the configuration type.
-
- C. Use source packages from a compressed file by adding the following
+
+ C. Use source packages from a compressed file by adding the following
CMake options:
HDF4_ALLOW_EXTERNAL_SUPPORT:STRING="TGZ"
JPEG_TGZ_NAME:STRING="jpeg_src.ext"
ZLIB_TGZ_NAME:STRING="zlib_src.ext"
SZIP_TGZ_NAME:STRING="szip_src.ext"
TGZPATH:STRING="some_location"
- where "some_location" is the URL or full path to the compressed
+ where "some_location" is the URL or full path to the compressed
file and ext is the type of compression file. Also set CMAKE_BUILD_TYPE
to the configuration type during configuration
-
- 3. If you are building on Apple Darwin platforms, you should add the
+
+ 3. If you are building on Apple Darwin platforms, you should add the
following options:
Compiler choice - use xcode by setting the ENV variables of CC and CXX
Shared fortran is not supported, build static:
@@ -271,12 +275,12 @@ IV. Further considerations
Additional options:
CMAKE_ANSI_CFLAGS:STRING=-fPIC
CTEST_USE_LAUNCHERS:BOOL=ON
- CMAKE_BUILD_WITH_INSTALL_RPATH:BOOL=OFF
-
+ CMAKE_BUILD_WITH_INSTALL_RPATH:BOOL=OFF
+
4. Windows developers should install NSIS to create an install image with CPack.
     Visual Studio Express users will not be able to package HDF4 into
an install image executable.
-
+
  5. Developers can copy the config/cmake/cacheinit.cmake file and alter
     the settings for the developers' environment. Then the only options needed
on the command line are those options that are different. Example using HDF
@@ -287,32 +291,32 @@ IV. Further considerations
Notes: CMake and HDF4
- 1. Using CMake for building and using HDF4 is under active development.
- While we have attempted to provide error-free files, please
- understand that development with CMake has not been extensively
+ 1. Using CMake for building and using HDF4 is under active development.
+ While we have attempted to provide error-free files, please
+ understand that development with CMake has not been extensively
tested outside of HDF. The CMake specific files may change
before the next release.
-
- 2. CMake support for HDF4 development should be usable on any
- system where CMake is supported. Please send us any comments on
- how CMake support can be improved on any system. Visit the
+
+ 2. CMake support for HDF4 development should be usable on any
+ system where CMake is supported. Please send us any comments on
+ how CMake support can be improved on any system. Visit the
KitWare site for more information about CMake.
-
+
3. Build and test results can be submitted to our CDash server at:
- cdash.hdfgroup.uiuc.edu.
+ cdash.hdfgroup.org.
Please read the HDF and CDash document at:
- www.hdfgroup.org/CDash/HowToSubmit.
-
+ www.hdfgroup.org/CDash/HowToSubmit.
+
4. See the appendix at the bottom of this file for examples of using
a ctest script for building and testing. Using a ctest script is
preferred because of its flexibility.
-
+
Notes: CMake in General
- 1. More information about using CMake can be found at the KitWare site at
+ 1. More information about using CMake can be found at the KitWare site at
www.cmake.org.
-
- 2. CMake uses the command line; however, the visual CMake tool is
+
+ 2. CMake uses the command line; however, the visual CMake tool is
available for the configuration step. The steps are similar for
all the operating systems supported by CMake.
@@ -337,27 +341,27 @@ These five steps are described in detail below.
The visual CMake executable is named "cmake-gui.exe" on Windows and should be
available in your Start menu. For Linux, UNIX, and Mac users the
- executable is named "cmake-gui" and can be found where CMake was
- installed.
-
- Specify the source and build directories.
-
+ executable is named "cmake-gui" and can be found where CMake was
+ installed.
+
+ Specify the source and build directories.
+
***** Make the build and source directories different. ******
-
- For example on Windows, if the source is at c:\MyHDFstuff\hdf4,
- then use c:\MyHDFstuff\hdf4\build or c:\MyHDFstuff\build\hdf4 as the
+
+ For example on Windows, if the source is at c:\MyHDFstuff\hdf4,
+ then use c:\MyHDFstuff\hdf4\build or c:\MyHDFstuff\build\hdf4 as the
build directory.
-
+
RECOMMENDED:
- Users can perform the configuration step without using the visual
- cmake-gui program. We use the file cacheinit.cmake in the
- config/cmake source folder for our testing. This file enables all of the
- basic options and we turn specific options on or off for testing
+ Users can perform the configuration step without using the visual
+ cmake-gui program. We use the file cacheinit.cmake in the
+ config/cmake source folder for our testing. This file enables all of the
+ basic options and we turn specific options on or off for testing
using the following command line within the build directory:
-
+
cmake -C <sourcepath>/config/cmake/cacheinit.cmake -G "<generator>" [-D<options>] <sourcepath>
-
- Where <generator> is
+
+ Where <generator> is
* MinGW Makefiles
* NMake Makefiles
* Unix Makefiles
@@ -402,135 +406,135 @@ These five steps are described in detail below.
set (HDF4_NO_PACKAGES OFF CACHE BOOL "CPACK - Disable packaging" FORCE)
set (HDF4_ALLOW_EXTERNAL_SUPPORT "NO" CACHE STRING "Allow External Library Building (NO SVN TGZ)" FORCE)
set_property (CACHE HDF4_ALLOW_EXTERNAL_SUPPORT PROPERTY STRINGS NO SVN TGZ)
- set (ZLIB_SVN_URL "http://svn.hdfgroup.uiuc.edu/zlib/trunk" CACHE STRING "Use ZLib from HDF repository" FORCE)
- set (SZIP_SVN_URL "http://svn.hdfgroup.uiuc.edu/szip/trunk" CACHE STRING "Use SZip from HDF repository" FORCE)
- set (JPEG_SVN_URL "http://svn.hdfgroup.uiuc.edu/jpeg/branches/jpeg8b" CACHE STRING "Use JPEG from HDF repository" FORCE)
+ set (ZLIB_SVN_URL "https://svn.hdfgroup.org/zlib/trunk" CACHE STRING "Use ZLib from HDF repository" FORCE)
+ set (SZIP_SVN_URL "https://svn.hdfgroup.org/szip/trunk" CACHE STRING "Use SZip from HDF repository" FORCE)
+ set (JPEG_SVN_URL "https://svn.hdfgroup.org/jpeg/branches/jpeg8b" CACHE STRING "Use JPEG from HDF repository" FORCE)
set (ZLIB_TGZ_NAME "ZLib.tar.gz" CACHE STRING "Use ZLib from compressed file" FORCE)
set (SZIP_TGZ_NAME "SZip.tar.gz" CACHE STRING "Use SZip from compressed file" FORCE)
set (JPEG_TGZ_NAME "JPEG8b.tar.gz" CACHE STRING "Use JPEG from compressed file" FORCE)
2. Configure the cache settings
- 2.1 Visual CMake users, click the Configure button. If this is the first time you are
- running cmake-gui in this directory, you will be prompted for the
- generator you wish to use (for example on Windows, Visual Studio 11).
- CMake will read in the CMakeLists.txt files from the source directory and
- display options for the HDF4 project. After the first configure you
+ 2.1 Visual CMake users, click the Configure button. If this is the first time you are
+ running cmake-gui in this directory, you will be prompted for the
+ generator you wish to use (for example on Windows, Visual Studio 11).
+ CMake will read in the CMakeLists.txt files from the source directory and
+ display options for the HDF4 project. After the first configure you
can adjust the cache settings and/or specify the locations of other programs.
-
+
Any conflicts or new values will be highlighted by the configure
- process in red. Once you are happy with all the settings and there are no
- more values in red, click the Generate button to produce the appropriate
- build files.
-
- On Windows, if you are using a Visual Studio generator, the solution and
+ process in red. Once you are happy with all the settings and there are no
+ more values in red, click the Generate button to produce the appropriate
+ build files.
+
+ On Windows, if you are using a Visual Studio generator, the solution and
project files will be created in the build folder.
-
+
On linux, if you are using the Unix Makefiles generator, the Makefiles will
be created in the build folder.
2.2 Preferred command line example on Windows in c:\MyHDFstuff\hdf4\build directory:
-
+
cmake -C ../config/cmake/cacheinit.cmake -G "Visual Studio 11" \
-DHDF4_ENABLE_SZIP_SUPPORT:BOOL=OFF -DHDF4_ENABLE_Z_LIB_SUPPORT:BOOL=OFF \
-DCMAKE_BUILD_TYPE:STRING=Release ..
- 2.3 On Windows, if you are using a Visual Studio Express version you must
+ 2.3 On Windows, if you are using a Visual Studio Express version you must
be sure that the following two options are correctly set/unset:
-
+
HDF4_NO_PACKAGES:BOOL=ON
HDF4_USE_FOLDERS:BOOL=OFF
-
+
3. Build HDF4
-
- On Windows, you can build HDF4 using either the Visual Studio Environment
+
+ On Windows, you can build HDF4 using either the Visual Studio Environment
or the command line. The command line can be used on all platforms;
Windows, linux, Unix, and Mac.
To build from the command line, navigate to your build directory and
execute the following:
-
- cmake --build . --config {Debug | Release}
-
+
+ cmake --build . --config {Debug | Release}
+
NOTE: "--config {Debug | Release}" may be optional on your platform. We
- recommend choosing either Debug or Release on Windows.
-
- 3.1 If you wish to use the Visual Studio environment, open the solution
- file in your build directory. Be sure to select either Debug or
+ recommend choosing either Debug or Release on Windows.
+
+ 3.1 If you wish to use the Visual Studio environment, open the solution
+ file in your build directory. Be sure to select either Debug or
Release and build the solution.
-
+
3.2.1 The external libraries (zlib, Szip, and jpeg) can be configured
to allow building the libraries by downloading from an SVN repository.
           The option is 'HDF4_ALLOW_EXTERNAL_SUPPORT' and is enabled by adding the following
configuration option:
-DHDF4_ALLOW_EXTERNAL_SUPPORT:STRING="SVN"
-
+
The options to control the SVN URL (config/cmake/cacheinit.cmake file) are:
- JPEG_SVN_URL:STRING="http://svn.hdfgroup.uiuc.edu/jpeg8b/trunk"
- ZLIB_SVN_URL:STRING="http://svn.hdfgroup.uiuc.edu/zlib/trunk"
- SZIP_SVN_URL:STRING="http://svn.hdfgroup.uiuc.edu/szip/trunk"
+ JPEG_SVN_URL:STRING="https://svn.hdfgroup.org/jpeg8b/trunk"
+ ZLIB_SVN_URL:STRING="https://svn.hdfgroup.org/zlib/trunk"
+ SZIP_SVN_URL:STRING="https://svn.hdfgroup.org/szip/trunk"
These should be changed to your location. Also define CMAKE_BUILD_TYPE
to be the configuration type.
-
+
3.2.2 Or the external libraries (zlib, Szip, and jpeg) can be configured
to allow building the libraries by using a compressed file.
- The option is 'HDF4_ALLOW_EXTERNAL_SUPPORT' and is enabled by
+ The option is 'HDF4_ALLOW_EXTERNAL_SUPPORT' and is enabled by
adding the following configuration option:
-DHDF4_ALLOW_EXTERNAL_SUPPORT:STRING="TGZ"
-
- The options to control the SVN URL (config/cmake/cacheinit.cmake
+
+          The options to control the compressed file names (config/cmake/cacheinit.cmake
file) are:
JPEG_TGZ_NAME:STRING="jpeg_src.ext"
ZLIB_TGZ_NAME:STRING="zlib_src.ext"
SZIP_TGZ_NAME:STRING="szip_src.ext"
TGZPATH:STRING="some_location"
- where "some_location/xxxx_src.ext" is the URL or full path to
- the compressed file and where ext is the type of the compression
+ where "some_location/xxxx_src.ext" is the URL or full path to
+ the compressed file and where ext is the type of the compression
file such as .bz2, .tar, .tar.gz, .tgz, or .zip. Also define
CMAKE_BUILD_TYPE to be the configuration type.
4. Test HDF4
To test the build, navigate to your build directory and execute:
-
+
ctest . -C {Debug | Release}
-
+
NOTE: "-C {Debug | Release}" may be optional on your platform. We
recommend choosing either Debug or Release to match the build
- step on Windows.
+ step on Windows.
5. Packaging HDF4 (create an install image)
-
+
To package the build into a simple installer using the NullSoft installer NSIS
on Windows, or into compressed files (.tar.gz, .sh, .zip), use the CPack tool.
To package the build, navigate to your build directory and execute;
-
+
cpack -C {Debug | Release} CPackConfig.cmake
-
+
NOTE: See note 8 of this document for NSIS information.
- Also, if you are using a Visual Studio Express version or do not
- want to enable the packaging components, set HDF4_NO_PACKAGES
+ Also, if you are using a Visual Studio Express version or do not
+ want to enable the packaging components, set HDF4_NO_PACKAGES
to ON (on the command line add -DHDF4_NO_PACKAGES:BOOL=ON)
-
- 6. The files that support building HDF4 with CMake are all the files in the
- config/cmake folder, the CMakeLists.txt files in each source folder, and
- CTestConfig.cmake. CTestConfig.cmake is specific to the internal testing
- performed by The HDF Group. It should be altered for the user's
+
+ 6. The files that support building HDF4 with CMake are all the files in the
+ config/cmake folder, the CMakeLists.txt files in each source folder, and
+ CTestConfig.cmake. CTestConfig.cmake is specific to the internal testing
+ performed by The HDF Group. It should be altered for the user's
installation and needs. The cacheinit.cmake file settings are used by
- The HDF Group for daily testing. It should be altered/ignored for the user's
+ The HDF Group for daily testing. It should be altered/ignored for the user's
installation and needs.
- 7. More information about using CMake can be found at the KitWare site,
+ 7. More information about using CMake can be found at the KitWare site,
www.cmake.org.
-
+
8. Nullsoft Scriptable Install System
- The Nullsoft Scriptable Install System (NSIS) is an open source installation
- system. It was created by the WinAmp authors to distribute that application,
- but it is now a general-purpose system which anyone might use. NSIS installers
- recognize /S for silent installation and /D=dir to specify the
- "output directory", which is where the program will be installed. These
- options are case-sensitive, so be sure to type them in upper case.
+ The Nullsoft Scriptable Install System (NSIS) is an open source installation
+ system. It was created by the WinAmp authors to distribute that application,
+ but it is now a general-purpose system which anyone might use. NSIS installers
+ recognize /S for silent installation and /D=dir to specify the
+ "output directory", which is where the program will be installed. These
+ options are case-sensitive, so be sure to type them in upper case.
@@ -538,8 +542,8 @@ These five steps are described in detail below.
VI. CMake Option Defaults for HDF4
========================================================================
-In the options listed below, there are three columns of information:
-Option Name, Option Description, and Option Default.
+In the options listed below, there are three columns of information:
+Option Name, Option Description, and Option Default.
The config/cmake/cacheinit.cmake file overrides the following values.
---------------- General Build Options ---------------------
@@ -563,9 +567,9 @@ HDF4_ENABLE_PARALLEL "Enable parallel build (requires MPI)"
HDF4_NO_PACKAGES "Do not include CPack Packaging" OFF
HDF4_PACKAGE_EXTLIBS "CPACK - include external libraries" OFF
HDF4_USE_FOLDERS "Enable folder grouping of projects in IDEs." OFF
-if (APPLE)
+if (APPLE)
HDF4_BUILD_WITH_INSTALL_NAME "Build with library install_name set to the installation path" OFF
-if (WIN32)
+if (WIN32)
HDF_LEGACY_NAMING "Use Legacy Names for Libraries and Programs" OFF
---------------- External Library Options ---------------------
@@ -576,7 +580,7 @@ HDF4_ENABLE_Z_LIB_SUPPORT "Enable Zlib Filters" ON
JPEG_USE_EXTERNAL "Use External Library Building for JPEG" 0
SZIP_USE_EXTERNAL "Use External Library Building for SZIP" 0
ZLIB_USE_EXTERNAL "Use External Library Building for ZLIB" 0
-if (HDF4_ENABLE_SZIP_SUPPORT)
+if (HDF4_ENABLE_SZIP_SUPPORT)
HDF4_ENABLE_SZIP_ENCODING "Use SZip Encoding" OFF
@@ -588,9 +592,9 @@ VII. User Defined Options for HDF4 Libraries with CMake
Support for User Defined macros and options has been added. The file
UserMacros.cmake has an example of the technique. In the folder,
config/cmake/UserMacros, is an implementation for Windows Visual Studio
-users for linking libraries to the static CRT - Windows_MT.cmake.
+users for linking libraries to the static CRT - Windows_MT.cmake.
-Copy the contents of the file, both macro and option, into the
+Copy the contents of the file, both macro and option, into the
UserMacros.cmake file. Then enable the option to the CMake configuration,
build and test process.
@@ -599,17 +603,17 @@ VIII. Options for Platform Configuration Files
========================================================================
Below is the HDF4LinuxCmake.cmake ctest script with extra comments.
-The example is for a linux machine, but the same scripts can be used on
+The example is for a linux machine, but the same scripts can be used on
a Windows machine by adjusting the CTEST_CMAKE_GENERATOR option in the
platform configuration script.
############################################################################
-# Product specific script, HDF4LinuxCmake.cmake, that uses the
+# Product specific script, HDF4LinuxCmake.cmake, that uses the
# CTestScript.cmake file. Usage:
# "ctest -S HDF4LinuxCmake.cmake -C Release -O hdf4shared.log"
############################################################################
-cmake_minimum_required(VERSION 2.8.10 FATAL_ERROR)
+cmake_minimum_required(VERSION 3.1.0 FATAL_ERROR)
# this is the location of the source hardcoded to hdf-4.2.11
set(CTEST_SOURCE_NAME hdf-4.2.11)
@@ -667,7 +671,7 @@ set(SITE_BUILDNAME_SUFFIX "SHARED")
######### Following controls source update ##########
#set(LOCAL_UPDATE "TRUE")
######### Following controls source repository ##########
-#set(REPOSITORY_URL "http://svn.hdfgroup.uiuc.edu/hdf4/trunk")
+#set(REPOSITORY_URL "https://svn.hdfgroup.org/hdf4/trunk")
#comment to use an uncompressed source folder
set(CTEST_USE_TAR_SOURCE "hdf-4.2.11")
###################################################################
diff --git a/release_notes/INSTALL_CYGWIN.txt b/release_notes/INSTALL_CYGWIN.txt
index 52a4719..eec2cb8 100644
--- a/release_notes/INSTALL_CYGWIN.txt
+++ b/release_notes/INSTALL_CYGWIN.txt
@@ -73,10 +73,10 @@ Preconditions:
HDF4 may be configured to use the SZIP compression Library.
For more information about the SZIP library, see
- http://hdfgroup.org/doc_resource/SZIP/
+ https://hdfgroup.org/doc_resource/SZIP/
The SZIP compression library is free for non-commercial use;
- see http://hdfgroup.org/doc_resource/SZIP/Commercial_szip.html
+ see https://hdfgroup.org/doc_resource/SZIP/Commercial_szip.html
for information regarding commercial use.
diff --git a/release_notes/RELEASE.txt b/release_notes/RELEASE.txt
index 30c897c..77bfa02 100644
--- a/release_notes/RELEASE.txt
+++ b/release_notes/RELEASE.txt
@@ -1,31 +1,31 @@
-HDF version 4.2.11 released on 2015-02-09
+HDF version 4.2.12 released on 2016-06-29
====================================================
INTRODUCTION
-This document describes the differences between HDF 4.2.10 and HDF 4.2.11.
-It is written for people who are familiar with previous releases of HDF
-and wish to migrate to HDF 4.2.11.
+This document describes the differences between HDF 4.2.11 and HDF 4.2.12.
+It is written for people who are familiar with previous releases of HDF
+and wish to migrate to HDF 4.2.12.
The HDF 4.2.11 documentation can be found on The HDF Group's website
at:
- http://www.hdfgroup.org/release4/doc/
+ https://www.hdfgroup.org/release4/doc/
First-time HDF users are encouraged to read the HDF FAQ, which can be
reached from the HDF product home page:
-
- http://hdfgroup.org/products/hdf4/
+
+ https://hdfgroup.org/products/hdf4/
If you have any questions or comments, please see the HDF Support page:
- http://hdfgroup.org/services/support.html
+ https://hdfgroup.org/services/support.html
CONTENTS
- New features and changes
-- Configuration
- Support for new platforms and compilers
-- Bugs fixed since HDF 4.2.10
+- Bugs fixed since HDF 4.2.11
-- Configuration
-- Library
-- Utilities
@@ -37,238 +37,173 @@ CONTENTS
New features and changes
========================
Configuration
- =============
- - None
-
-
-Support for new platforms and compilers
-=======================================
- - None
-
-
-Bugs fixed since HDF 4.2.10
-=========================
- Configuration
=============
- - Windows installer incorrect display of PATH environment variable.
-
- In the Windows installer, the dialog box where the user can elect to
- add the product's bin path to the %PATH% environment variable displayed
- an incorrect path. This path was missing the C:\Program Files part
- and used the POSIX file separator '/' before the bin (<path>/bin,
- instead of <path>\bin).
-
- The dialog box text was changed to simply say that the product's bin
- path would be added instead of explicitly displaying the path.
- This is in line with most installers. The reason for not fixing the
- displayed path instead is that it is difficult to pass the correct
- path from CPack to the NSIS installer for display.
-
- Note that this was never a code issue - it was just a display
- problem. The installer always did the right thing when updating the
- environment variable.
-
- (DER - 2014/11/14, HDFFV-9016)
+ - None
Library
- =========
- - Warning "array subscript is below array bounds"
+ =========
+ - Behavior of HDstrdup changed
+ HDstrdup now checks the input string for NULL. (BMR, 2016/05/12)
- Applied user's patch to remove the warning.
+ - Behavior of SDsetexternalfile changed
+ Previously, when SDsetexternalfile was called more than once on a data
+ set, the library would repeatedly store the external file information in
+      the main file, at different offsets. SDsetexternalfile has been fixed
+      to have no effect when the data set is already external. (BMR, 2016/05/30)
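+
+      As an illustrative sketch only (sds_id is assumed to be an SDS
+      identifier obtained from SDcreate or SDselect, and the external file
+      name and offset below are arbitrary):
+
+          /* Move the data of sds_id into raw_data.dat, starting at offset 0.
+             Calling SDsetexternalfile again on the now-external data set has
+             no effect in this release. */
+          intn status = SDsetexternalfile(sds_id, "raw_data.dat", 0);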
- (BMR 2014/06/02, HDFFR-1379)
+ - Added new utility function HDisnetcdf64 for use in tools
+      HDisnetcdf64 returns TRUE(1) if a file is a netCDF 64-bit file and
+      FALSE(0) otherwise.
+ intn HDisnetcdf64(const char *filename)
+ (BMR, 2016/06/14)
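+
+      A hypothetical use in a tool (the file name below is arbitrary and
+      <stdio.h> is assumed) might look like:
+
+          if (HDisnetcdf64("input.nc"))
+              fprintf(stderr, "cannot read a netCDF 64-bit file\n");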
Utilities
- =========
- - Detection of read failure in ncdump
+ =========
+ - hdp: detection of netCDF 64-bit file
+ The utility hdp simply failed when the input file was a netCDF 64-bit
+      file. It now reports that it cannot read a netCDF 64-bit file and then
+      exits or continues to the next input file.
+ (BMR, 2016/06/14)
- Previously, ncdump did not detect failure from ncvarget because the
- returned value from ncvarget was not checked, and the calling function
- simply returned 0.
- The error code ERR_READFAIL (-2) is added to ncdump only to indicate this
- failure within ncdump, which will display this message:
-
- "Reading failed for variable <Variable name>, the data is possibly corrupted."
-
- (BMR 2015/01/21, HDFFR-1468)
-
- - Improvement of the ncgen's usage statement.
-
- Previously, ncgen's usage looked like this:
- ncgen: -: Usage: ncgen [-V] [ -b ] [ -c ] [ -f ] [ -o outfile] [ file... ]
-
- More details are added to the usage to improve clarity. Now, it is more
- clear, and consistent with ncdump, like this:
- Usage: ncgen [-V] [ -b ] [ -c ] [ -f ] [ -o outfile] [ file ... ]
- [-V] Display version of the HDF4 library and exit
- [-b] For binary netCDF output, '.nc' extension
- [-c] For C output
- [-f] For Fortran output
- [-o <outfile>] Explicitly specify output file name
-
- (BMR 2015/01/19, HDFFR-1459)
-
- - Output of hrepack containing an unnecessary vgroup of class RIG0.0
-
- When the input file did not have any GR elements, hrepack still opened and
- closed the output file using the GR API, which caused the RIG0.0 vgroup to
- be written to the output file.
+Support for new platforms and compilers
+=======================================
+ - Support for Mac OS X Yosemite 10.10 added (AKC - 2015/03/04, HDFFR-1500)
- Hrepack now skips accessing the output file using GR API, when the input
- file doesn't have any images and any GR attributes.
+ - Support for Mac OS X El Capitan 10.11 added (AKC - 2015/11/18, HDFFR-1425)
+ (BMR, added for AKC, 2016/06/24)
- (BMR 2015/01/18, HDFFR-1428)
+ Java Wrapper Library
+ --------------------
- - Compliance with Fedora standard regarding printf/fprintf statements
+ The Java HDF JNI library has been integrated into the HDF repository.
+ The configure option is "--enable-java", and the CMake option is
+      HDF4_BUILD_JAVA:BOOL=ON. The package hierarchy has changed from
+      "ncsa.hdf.hdflib", used by the earlier HDF4 JNI, to "hdf.hdflib"
+      in HDF 4.2.12.
- Users sent patches for the problem where the format string is missing from
- the printf/fprintf statements. This is in compliance with Fedora standard.
- For more information, see
- https://fedoraproject.org/wiki/Format-Security-FAQ.
- In the context where this problem occurred, the benefit of using puts/fputs
- over printf/fprintf is insignificant. Thus, the fix was adding "%s" to
- those printf/fprintf statements that don't have the format string instead
- of switching to puts/fputs.
+Bugs fixed since HDF 4.2.11
+=========================
+ Configuration
+ =============
+ - Examples from mfhdf and hdf will now be installed according to $DESTDIR when
+ it is supplied. (LRK, 2016/06/29, HDFFR-1491)
- (BMR 2014/12/16, HDFFR-1423 and HDFFR-1475)
+ Library
+ =========
+ - SDsetexternalfile on special elements
+      When the data element is already special, an incorrect data length was
+      used for the element in subsequent calls to SDsetexternalfile, which
+      sometimes caused failures. This is now fixed. (BMR, 2016/01/04, HDFFR-1516)
- - Failure of hdp on some hdfeos generated files
- Attribute vdatas created by hdfeos API have the field named "AttrValues".
- The utility functions Vattrhdfsize and VSattrhdfsize, in hdp.c, used
- ATTR_FIELD_NAME ("VALUES") to verify that a vdata is storing an attribute,
- causing failure on some hdfeos generated files. In addition, when this
- failure occurred, the calling function tried to free allocated resources
- prematurely.
+ Utilities
+ =========
+ - None
- The check against ATTR_FIELD_NAME and the premature resource deallocation
- are removed.
- (BMR 2014/12/08, HDFFR-1471)
+Documentation
+=============
+ - In addition to minor improvements to the contents, the user documentation
+   has a new format to improve usability.
+ (BMR, 2016/06/24)
- - nclong versus long in tests
- Applied the user's patch to remove a test failure.
+Platforms tested
+================
+This version has been tested on the following platforms:
- (BMR 2014/10/21, HDFFR-1378)
+(Format:
+ uname -s, uname -r
+ uname -v, uname -p, uname -m)
-Documentation
-=============
- - Updated Reference Manual and User's Guide
- The documents were updated to contain information of the changes to
- the tools. In addition, various improvements were applied.
+ Linux 2.6.32-573.22.1.el6.x86_64 gcc (GCC) 4.4.7 20120313 (Red Hat 4.4.7-16)
+ #1 SMP, x86_64 GNU Fortran (GCC) 4.4.7 20120313 (Red Hat 4.4.7-16)
+ (mayll/platypus) icc (ICC) 15.0.3.187 Build 20150407
+ ifort (IFORT) 15.0.3.187 Build 20150407
+ pgcc and pgf90 15.7-0 64-bit target on x86-64 Linux -tp nehalem
- (BMR 2015/2/04)
+ Linux, 3.10.0-327.10.1.el7.x86_64 gcc (GCC) 4.8.5 20150623 (Red Hat 4.8.5-4)
+ #1 SMP x86_64, GNU/Linux GNU Fortran (GCC) 4.8.5 20150623 (Red Hat 4.8.5-4)
+ (kituo/moohan) icc (ICC) 15.0.3.187 Build 20150407
+ ifort (IFORT) 15.0.3.187 Build 20150407
+ Linux, 2.6.32-573.18.1.el6.ppc64 (1) GNU Fortran (GCC) 4.4.7 20120313 (Red Hat 4.4.7-11)
+ #1 SMP, ppc64 (ostrich) (2) IBM XL Fortran for Linux, V15.1 (64-bit mode)
+ SunOS 5.11 (32- and 64-bit) Sun C 5.12 SunOS_sparc 2011/11/16
+ 11.1, sparc, sun4v (emu) Sun Fortran 95 8.6 SunOS_sparc 2011/11/16
-Platforms tested
-================
+ Windows 7 Visual Studio 2012 w/ Intel Fortran 15 (cmake)
+ Visual Studio 2013 w/ Intel Fortran 15 (cmake)
+ Visual Studio 2015 w/ Intel Fortran 16 (cmake)
+ Cygwin(CYGWIN_NT-6.1 2.2.1(0.289/5/3)
+ gcc(4.9.3) compiler and gfortran)
+ (cmake and autotools)
-This version has been tested in the following platforms:
+ Windows 7 x64 Visual Studio 2012 w/ Intel Fortran 15 (cmake)
+ Visual Studio 2013 w/ Intel Fortran 15 (cmake)
+ Visual Studio 2015 w/ Intel Fortran 16 (cmake)
- Linux 2.6.32-358.18.1 gcc (GCC) 4.4.7 20120313 (Red Hat 4.4.7-11)
- .el6.ppc64 #1 GNU Fortran (GCC) 4.4.7 20120313 (Red Hat 4.4.7-11)
- SMP ppc64 GNU/Linux IBM XL Fortran for Linux, V15.1 (64-bit mode)
- (ostrich)
-
- Linux 2.6.18-308.13.1.el5 #1 gcc (GCC) 4.1.2 20080704 (Red Hat 4.1.2-55)
- SMP i686 i386 GNU Fortran (GCC) 4.1.2 20080704
- (jam) (Red Hat 4.1.2-55)
- pgcc and pgf90 14.10-0 32-bit target
- on x86 Linux -tp penryn
- Intel(R) C Compiler, Version 15.0.1 20141022
- Intel(R) Fortran Compiler, Version 15.0.1
-
- Linux 2.6.18-398.el5 #1 gcc (GCC) 4.1.2 20080704 (Red Hat 4.1.2-55)
- SMP x86_64 GNU/Linux GNU Fortran (GCC) 4.1.2 20080704
- (koala) (Red Hat 4.1.2-55)
- icc (ICC) 15.0.1 20141022
- ifort (IFORT) 15.0.1 20141022
-
- Linux 2.6.32-504.1.3.el6 gcc (GCC) 4.4.7 20120313 (Red Hat 4.4.7-11)
- #1 SMP x86_64 GNU/Linux GNU Fortran (GCC) 4.4.7 20120313
- (platypus) (Red Hat 4.4.7-11)
- icc (ICC) 15.0.1 20141022
- ifort (IFORT) 15.0.1 20141022
- pgcc and pgf90 14.10-0 64-bit target
- on x86-64 Linux -tp nehalem
-
- Linux 3.10.0-123.8.1.el7 gcc (GCC) 4.8.2 20140120 (Red Hat 4.8.2-16)
- #1 SMP x86_64 GNU/Linux GNU Fortran (GCC) 4.8.2 20140120
- (aws ec2 CentOS 7 image) (Red Hat 4.8.2-16)
-
- SunOS 5.11 32- and 64-bit Sun C 5.12 SunOS_sparc 2011/11/16
- (emu) (see "Known problem" section)
- Sun Fortran 95 8.6 SunOS_sparc 2011/11/16
-
- Windows 7 Visual Studio 2008 (cmake)
- Visual Studio 2010 w/ Intel Fortran 14 (cmake)
- Visual Studio 2012 w/ Intel Fortran 14 (cmake)
- Visual Studio 2013 w/ Intel Fortran 14 (cmake)
- Cygwin(CYGWIN_NT-6.1 1.7.32(0.274/5/3) gcc(4.8.3) compiler and gfortran)
- (cmake and autotools)
+ Windows 8.1 Visual Studio 2012 w/ Intel Fortran 15 (cmake)
+ Visual Studio 2013 w/ Intel Fortran 15 (cmake)
- Windows 7 x64 Visual Studio 2008 (cmake)
- Visual Studio 2010 w/ Intel Fortran 14 (cmake)
- Visual Studio 2012 w/ Intel Fortran 14 (cmake)
- Visual Studio 2013 w/ Intel Fortran 14 (cmake)
-
- Windows 8.1 Visual Studio 2012 w/ Intel Fortran 14 (cmake)
- Visual Studio 2013 w/ Intel Fortran 14 (cmake)
+ Windows 8.1 x64 Visual Studio 2012 w/ Intel Fortran 15 (cmake)
+ Visual Studio 2013 w/ Intel Fortran 15 (cmake)
- Windows 8.1 x64 Visual Studio 2012 w/ Intel Fortran 14 (cmake)
- Visual Studio 2013 w/ Intel Fortran 14 (cmake)
+ Mac OS X 10.8.5, Darwin, 12.6.0 Apple clang version 5.1 from Xcode 5.1
+ 12.6.0, x86_64 gfortran GNU Fortran (GCC) 4.8.2
+ (swallow,kite) Intel icc and ifort Version 15.0.3
- Mac OS X 10.7.5 Apple clang version 3.0 from Xcode 4.6.1
- Darwin 11.4.2 gfortran GNU Fortran (GCC) 4.8.2
- (duck) icc and ifort Version 13.0.3 20130606
+ Mac OS X 10.9.5, Darwin, 13.4.0 Apple clang version 6.0 from Xcode 6.2
+ 13.4.0, x86_64 gfortran GNU Fortran (GCC) 4.9.2
+ (wren,quail) Intel icc and ifort Version 15.0.3
- Mac OS X 10.8.5 Apple clang version 5.1 from Xcode 5.1
- Darwin 12.5.0 gfortran GNU Fortran (GCC) 4.8.2
- (swallow,kite) icc and ifort Version 14.0.4 20140805
+ Mac OS X 10.10.5, Darwin, 14.5.0 Apple clang version 6.0 from Xcode 7.0
+ 14.5.0, x86_64 gfortran GNU Fortran (GCC) 4.9.2
+ (osx1010dev) Intel icc and ifort version 15.0.3
- Mac OS X 10.9.5 Apple clang version 6.0 from Xcode 6.0.1
- Darwin 13.4.0 gfortran GNU Fortran (GCC) 4.8.2
- (wren,quail) icc and ifort Version 15.0.1 20141022
+ Mac OS X 10.11.5, Darwin, 15.4.0 Apple clang version 7.3 from Xcode 7.3
+ 15.5.0, x86_64 gfortran GNU Fortran (GCC) 5.2.0
+ (osx1010dev) Intel icc and ifort version 15.0.3
- Debian7.5.0 3.2.0-4-amd64 #1 SMP Debian 3.2.51-1 x86_64 GNU/Linux
- gcc (Debian 4.7.2-5) 4.7.2
- GNU Fortran (Debian 4.7.2-5) 4.7.2
- (cmake and autotools)
+ Debian7.5.0 3.2.0-4-amd64
+ #1 SMP Debian 3.2.51-1, x86_64 GNU/Linux
+ gcc (Debian 4.7.2-5) 4.7.2
+ GNU Fortran (Debian 4.7.2-5) 4.7.2
+ (cmake and autotools)
- Fedora20 3.15.3-200.fc20.x86_64 #1 SMP x86_64 x86_64 x86_64 GNU/Linux
- gcc (GCC) 4.8.3 20140624 (Red Hat 4.8.3-1)
- GNU Fortran (GCC) 4.8.3 20140624 (Red Hat 4.8.3-1)
- (cmake and autotools)
+ Fedora20 3.15.3-200.fc20.x86_64
+ #1 SMP x86_64 GNU/Linux gcc (GCC) 4.8.3 20140624 (Red Hat 4.8.3-1)
+ GNU Fortran (GCC) 4.8.3 20140624 (Red Hat 4.8.3-1)
+ (cmake and autotools)
- SUSE 13.1 3.11.10-17-desktop #1 SMP PREEMPT x86_64 x86_64 x86_64 GNU/Linux
- gcc (SUSE Linux) 4.8.1
- GNU Fortran (SUSE Linux) 4.8.1
- (cmake and autotools)
+ SUSE 13.1 3.11.10-17-desktop
+ #1 SMP PREEMPT x86_64 GNU/Linux gcc (SUSE Linux) 4.8.1
+ GNU Fortran (SUSE Linux) 4.8.1
+ (cmake and autotools)
- Ubuntu 14.04 3.13.0-35-generic #62-Ubuntu SMP x86_64 GNU/Linux
- gcc (Ubuntu/Linaro 4.9.1-0ubuntu1) 4.9.1
- GNU Fortran (Ubuntu/Linaro 4.9.1-0ubuntu1) 4.9.1
- (cmake and autotools)
+ Ubuntu 14.04 3.13.0-35-generic
+ #62-Ubuntu SMP x86_64 GNU/Linux gcc (Ubuntu/Linaro 4.9.1-0ubuntu1) 4.9.1
+ GNU Fortran (Ubuntu/Linaro 4.9.1-0ubuntu1) 4.9.1
+ (cmake and autotools)
Known problems
==============
o Several Fortran examples print "^@" when displaying strings (for example,
- names of the attributes). This happens because Fortran application
- doesn't know the length of the strings passed from the C library.
+ names of the attributes). This happens because Fortran application
+ doesn't know the length of the strings passed from the C library.
EIP - 2015-01-11, HDFFR-1477
o CMake builds in Windows uses the same pre-generated ncgen*.[ch] files from
the yacc/lex input files. The generated file, ncgenyy.c, uses the <unistd.h>
header file that Windows does not support. This must be blocked out in
- order for Windows to use it.
+ order for Windows to use it.
AKC 2014-02-03, HDFFR-1424
o CMake "make install" fails installing the tools:
@@ -276,22 +211,23 @@ o CMake "make install" fails installing the tools:
ADB - 2014/02/03
o CMake does not install these man pages:
- hdf.1, ncdump.1, ncgen.1
+ hdf.1, ncdump.1, ncgen.1
AKC/BMR - 2014/02/02
-o For Mac OS X 10.7 Lion and on 10.8 Mountain Lion, several tests fail with
- GCC, Intel and Clang compilers. Currently, this situation is detected and
- -O0 level optimization is used.
- (HDFFR-1318,1358) EIP - 2013/02/05
+o For Mac OS X 10.7 Lion, 10.8 Mountain Lion, 10.9 Mavericks, 10.10 Yosemite,
+ and 10.11 El Capitan, when compiling with -O2, some xdr functions might cause
+ memory corruption. This happened for GCC, Intel and Clang compilers.
+ Currently, -O0 level optimization is used to avoid this problem.
+ (HDFFR-1318,1327,1358,1425) EIP - 2013/02/05, BMR - 2016/06/24
-o On IBM PowerPC 64, hdftest fails when gcc 4.4.6 is used with -O3 optimization
- level.
+o On IBM PowerPC 64, hdftest fails when gcc 4.4.6 is used with -O3 optimization
+ level.
o When building in AIX systems, if CC is xlc with -qlanglvl=ansi, configure
will fail when checking for the jpeglib.h header due to the duplicated
macro definition of HAVE_STDLIB_H. This is because some newer builds
of the jpeg library have HAVE_STDLIB_H defined in the jconfig.h header file.
- Without the -qlanglvl=ansi, some older xlc versions (e.g., V7.0) still
+ Without the -qlanglvl=ansi, some older xlc versions (e.g., V7.0) still
fail, but newer xlc versions (e.g., V9.0) pass. AKC - 2010/02/17
o When building on Linux/UNIX platforms, the szip shared library files must
@@ -301,7 +237,7 @@ o When building on Linux/UNIX platforms, the szip shared library files must
Ex. export LD_LIBRARY_PATH=path_to_szip_lib:$LD_LIBRARY_PATH
Optionally, one can use the static szip library files by adding '-static'
to the CFLAGS environment variable.
-
+
o Existing data written by an HDF4 Library prior to HDF 4.2r2:
When a one-dimensional SDS and a dimension scale have
the same name, subsequent accesses to the dimension scale or to the
@@ -312,8 +248,8 @@ o Existing data written by an HDF4 Library prior to HDF 4.2r2:
HDF4 Library Releases 4.2r2 and later make a distinction between an SDS
and a dimension variable. However, as with older versions, these recent
- versions are unable to detect such conflicts in files created by earlier
- releases. It is therefore STRONGLY recommended to check for such name
+ versions are unable to detect such conflicts in files created by earlier
+ releases. It is therefore STRONGLY recommended to check for such name
duplication before working with data created with a pre-4.2r2 library.
The functions SDgetnumvars_byname and SDnametoindices are provided
@@ -327,8 +263,8 @@ o N-bit compression is not supported with Fortran APIs.
o Using both fill-value and compression on SD datasets does not work.
-o When using PGI compilers, make sure that the JPEG library is also compiled
- with a PGI C compiler; linking with a JPEG library built with gcc causes
+o When using PGI compilers, make sure that the JPEG library is also compiled
+ with a PGI C compiler; linking with a JPEG library built with gcc causes
JPEG library tests to fail. To bypass the problem:
x Set LIBS flag to $PGI_JPEG_INSTALL_DIR/lib/libjpeg.a
@@ -337,7 +273,7 @@ o When using PGI compilers, make sure that the JPEG library is also compiled
setenv LIBS $PGI_JPEG_INSTALL_DIR/lib/libjpeg.a
- x Use the --with-jpeg=$PGI_JPEG_INSTALL_DIR configure flag to
+ x Use the --with-jpeg=$PGI_JPEG_INSTALL_DIR configure flag to
configure with the PGI-compiled JPEG library:
./configure --with-jpeg=$PGI_JPEG_INSTALL_DIR --with-zlib....
diff --git a/release_notes/USING_CMake_Examples.txt b/release_notes/USING_CMake_Examples.txt
index 2167c1f..1bbb3a2 100644
--- a/release_notes/USING_CMake_Examples.txt
+++ b/release_notes/USING_CMake_Examples.txt
@@ -2,38 +2,33 @@
* Build and Test HDF4 Examples with CMake *
************************************************************************
-Notes: This short instruction is written for users who want to quickly
+Notes: This short instruction is written for users who want to quickly
test the installation of HDF4 by using the CMake tools to build
and test the HDF4 Examples. The following instructions will show
the default usage and then present common changes for non-default
installations.
For more information, see the USING_HDF4_CMake.txt file.
-
- More information about using CMake can be found at the KitWare
+
+ More information about using CMake can be found at the KitWare
site, www.cmake.org.
-
+
CMake uses the command line and these instructions use the script
method of the ctest command.
========================================================================
-I. Preconditions
+I. Preconditions
========================================================================
1. We suggest you obtain the latest CMake for windows from the Kitware
- web site. The HDF 4.2.x product requires a minimum CMake version
- of 2.8.12.
-
- 2. You have installed the HDF4 library built with CMake by executing
- the HDF Install Utility (the *.exe file in the binary package for
- Windows or the *.sh on Linux). If you are using a Windows platform,
- you can obtain a pre-built Windows binary from The HDF Group's website
- at www.hdfgroup.org. See Section "III. Common changes to the
- HDF4_Examples.cmake file", for the line to change the location.
-
- 3. On Windows, you have installed the 7Zip package. See Section "III.
- Common changes to the HDF4_Examples.cmake file", for the line to
- change the command.
+ web site. The HDF 4.2.x product requires a minimum CMake version
+ of 3.1.0.
+
+ 2. You have installed the HDF4 library built with CMake by executing
+ the HDF Install Utility (the *.exe or *.msi file in the binary package for
+ Windows or the *.sh on Linux). If you are using a Windows platform,
+ you can obtain a pre-built Windows binary from The HDF Group's website
+ at www.hdfgroup.org.
@@ -42,40 +37,39 @@ II. Building HDF4 Examples with CMake
========================================================================
Files in the HDF4 install directory:
- HDF4Examples-0.1.1-Source.zip (or HDF4Examples-0.1.1-Source.tar.gz)
+ HDF4Examples folder
HDF4_Examples.cmake
-
+
Default installation process:
Create a directory to run the examples, i.e. \test_hdf4.
- Copy HDF4Examples-0.1.1-Source.zip(.tar.gz) to this directory, do NOT unzip.
+      Copy the HDF4Examples folder to this directory.
Copy HDF4_Examples.cmake to this directory.
- Edit line 8 of the HDF4_Examples.cmake file and change the INSTALLDIR
- to the HDF4 install location.
-
- Execute from this directory:
- ctest -S HDF4_Examples.cmake,HDF4Examples-0.1.1-Source -C Release -O test.log
-
-The script will uncompress the examples file HDF4Examples-0.1.1-Source.zip(.tar.gz),
- and create a build directory inside the HDF4Examples-0.1.1-Source directory.
- It will then configure, build, and execute the examples. All the log files
- will be found under the build\Testing\Temporary directory, check these for
- errors.
-
-The amount of script information can be increased by adding -V to the ctest
- command. Even more information can be shown by adding -VV instead of -V.
+ The default source folder is defined as "HDF4Examples". It can be changed
+ with the CTEST_SOURCE_NAME script option.
+ The default installation folder is defined for the platform.
+ It can be changed with the INSTALLDIR script option.
+ The default ctest configuration is defined as "Release". It can be changed
+ with the CTEST_BUILD_CONFIGURATION script option. Note that this must
+ be the same as the value used with the -C command line option.
+ The default build configuration is defined to build and use static libraries.
+ Shared libraries can be used with the STATICLIBRARIES script option set to "NO".
+ Other options can be changed by editing the HDF4_Examples.cmake file.
+
+ If the defaults are okay, execute from this directory:
+ ctest -S HDF4_Examples.cmake -C Release -V -O test.log
+ If the defaults need to be changed, execute from this directory:
+ ctest -S HDF4_Examples.cmake,CTEST_SOURCE_NAME=MyExamples,INSTALLDIR=MyLocation -C Release -V -O test.log
+
+When executed, the ctest script will save the results to the log file, test.log, as
+indicated by the ctest command. If you wish to see more build and test information,
+add "-VV" to the ctest command. The output should show near the end:
+ 100% tests passed, 0 tests failed out of 49.
========================================================================
-III. Common changes to the HDF4_Examples.cmake file
+III. Other changes to the HDF4_Examples.cmake file
========================================================================
-Line 8: change the INSTALLDIR to a different HDF4 install location.
-
-Line 14: uncomment to allow Mac machines to build shared examples.
-
-Line 15: uncomment to build and test Fortran examples.
-
-Line 16: comment to use an existing source directory.
-
-Line 70: change the CTEST_7Z_COMMAND to a different unzip program.
+Lines 56-58: uncomment to use a source tarball or zipfile;
+ Add script option "TAR_SOURCE=MySource.tar".
diff --git a/release_notes/USING_HDF4_CMake.txt b/release_notes/USING_HDF4_CMake.txt
index 082eac5..bc41113 100644
--- a/release_notes/USING_HDF4_CMake.txt
+++ b/release_notes/USING_HDF4_CMake.txt
@@ -2,60 +2,60 @@
* Build and Install HDF4 Applications with CMake *
************************************************************************
-Notes: This short instruction is written for users who want to quickly
- build HDF4 applications using the CMake tools. Users can adapt
- these instructions for their own applications. For more information,
+Notes: This short instruction is written for users who want to quickly
+ build HDF4 applications using the CMake tools. Users can adapt
+ these instructions for their own applications. For more information,
see the "Minimum C Project Files for CMake" section.
-
- More information about using CMake can be found at the KitWare
+
+ More information about using CMake can be found at the KitWare
site, www.cmake.org.
-
- CMake uses the command line; however, the visual CMake tool is
+
+ CMake uses the command line; however, the visual CMake tool is
available for the configuration step. The steps are similar for
all of the operating systems supported by CMake.
-
+
NOTES:
- 1. Using CMake for building and using HDF4 is under active
- development. While we have attempted to provide error-free
- files, please understand that development with CMake has not
- been extensively tested outside of HDF. The CMake specific
+ 1. Using CMake for building and using HDF4 is under active
+ development. While we have attempted to provide error-free
+ files, please understand that development with CMake has not
+ been extensively tested outside of HDF. The CMake specific
files may change before the next release.
-
- 2. CMake for HDF4 development should be usable on any system
- where CMake is supported. Please send us any comments on how
- CMake support can be improved on any system.
-
- 3. See the appendix at the bottom of this file for an example
- of using a ctest script for building and testing. See
+
+ 2. CMake for HDF4 development should be usable on any system
+ where CMake is supported. Please send us any comments on how
+ CMake support can be improved on any system.
+
+ 3. See the appendix at the bottom of this file for an example
+ of using a ctest script for building and testing. See
CMake.txt for more information.
========================================================================
-I. Preconditions
+I. Preconditions
========================================================================
1. We suggest you obtain the latest CMake for windows from the Kitware
- web site. The HDF 4.2.x product requires a minimum CMake version
- of 2.8.12.
-
- 2. You have installed the HDF4 library built with CMake, by executing
- the HDF Install Utility (the *.exe file in the binary package for
- Windows). If you are using a Windows platform, you can obtain a
- pre-built Windows binary from The HDF Group's website at
+ web site. The HDF 4.2.x product requires a minimum CMake version
+ of 3.1.0.
+
+ 2. You have installed the HDF4 library built with CMake, by executing
+ the HDF Install Utility (the *.exe file in the binary package for
+ Windows). If you are using a Windows platform, you can obtain a
+ pre-built Windows binary from The HDF Group's website at
www.hdfgroup.org.
- 3. Set the environment variable HDF4_DIR to the installed location of
- the config files for HDF4. On Windows:
+ 3. Set the environment variable HDF4_DIR to the installed location of
+ the config files for HDF4. On Windows:
HDF4_DIR=C:/Program Files/HDF_Group/HDF/4.2.x/cmake/hdf4
-
+
(Note there are no quote characters used on Windows and all platforms
use forward slashes)
4. Created separate source and build directories.
(CMake commands are executed in the build directory)
- 5. Created a CMakeLists.txt file(s) for your source. See Section III
+ 5. Created a CMakeLists.txt file(s) for your source. See Section III
below.
@@ -79,22 +79,22 @@ These steps are described in more detail below.
The visual CMake executable is named "cmake-gui.exe" on Windows and should be
available in your Start menu. For Linux, UNIX, and Mac users the
- executable is named "cmake-gui" and can be found where CMake was
- installed.
-
- Specify the source and build directories. Make the build and source
- directories different. For example on Windows, if the source is at
- c:\MyHDFstuff\hdf4, then use c:\MyHDFstuff\hdf4\build or
- c:\MyHDFstuff\build\hdf4 for the build directory.
-
+ executable is named "cmake-gui" and can be found where CMake was
+ installed.
+
+ Specify the source and build directories. Make the build and source
+ directories different. For example on Windows, if the source is at
+ c:\MyHDFstuff\hdf4, then use c:\MyHDFstuff\hdf4\build or
+ c:\MyHDFstuff\build\hdf4 for the build directory.
+
PREFERRED:
- Users can perform the configuration step without using the visual
- cmake-gui program. The following is an example command line
+ Users can perform the configuration step without using the visual
+ cmake-gui program. The following is an example command line
configuration step executed within the build directory:
-
+
cmake -G "<generator>" [-D<options>] <sourcepath>
-
- Where <generator> is
+
+ Where <generator> is
* MinGW Makefiles
* NMake Makefiles
* Unix Makefiles
@@ -111,63 +111,63 @@ These steps are described in more detail below.
2. Configure the cache settings
- 2.1 Visual CMake users, click the Configure button. If this is the first time you are
- running cmake-gui in this directory, you will be prompted for the
- generator you wish to use (for example on Windows, Visual Studio 10).
- CMake will read in the CMakeLists.txt files from the source directory and
- display options for the HDF4 project. After the first configure you
+ 2.1 Visual CMake users, click the Configure button. If this is the first time you are
+ running cmake-gui in this directory, you will be prompted for the
+ generator you wish to use (for example on Windows, Visual Studio 10).
+ CMake will read in the CMakeLists.txt files from the source directory and
+ display options for the HDF4 project. After the first configure you
can adjust the cache settings and/or specify locations of other programs.
-
+
Any conflicts or new values will be highlighted by the configure
- process in red. Once you are happy with all the settings and there are no
- more values in red, click the Generate button to produce the appropriate
- build files.
-
- On Windows, if you are using a Visual Studio generator, the solution and
+ process in red. Once you are happy with all the settings and there are no
+ more values in red, click the Generate button to produce the appropriate
+ build files.
+
+ On Windows, if you are using a Visual Studio generator, the solution and
project files will be created in the build folder.
-
+
On linux, if you are using the Unix Makefiles generator, the Makefiles will
be created in the build folder.
2.2 Alternative command line example on Windows in c:\MyHDFstuff\hdf4\build directory:
-
+
cmake -G "Visual Studio 11" -DBUILD_TESTING:BOOL=ON -DUSE_SHARED_LIBS:BOOL=ON ..
3. Build HDF4 applications
-
- On Windows, you can build HDF4 applications using either the Visual Studio Environment
+
+ On Windows, you can build HDF4 applications using either the Visual Studio Environment
or the command line. The command line is normally used on linux, Unix, and Mac.
To build from the command line, navigate to your build directory and
execute the following:
-
- cmake --build . --config {Debug | Release}
-
+
+ cmake --build . --config {Debug | Release}
+
NOTE: "--config {Debug | Release}" may be optional on your platform. We
recommend choosing either Debug or Release on Windows. If you are
- using the pre-built binaries from HDF, use Release.
-
- 3.1 If you wish to use the Visual Studio environment, open the solution
- file in your build directory. Be sure to select either Debug or
+ using the pre-built binaries from HDF, use Release.
+
+ 3.1 If you wish to use the Visual Studio environment, open the solution
+ file in your build directory. Be sure to select either Debug or
Release and build the solution.
-
+
4. Test HDF4 Applications
To test the build, navigate to your build directory and execute:
-
+
ctest . -C {Debug | Release}
-
+
NOTE: "-C {Debug | Release}" may be optional on your platform. We
recommend choosing either Debug or Release to match the build
- step on Windows.
+ step on Windows.
- 5. The files that support building with CMake are all of the files in the
- config/cmake folder, the CMakeLists.txt files in each source folder, and
- CTestConfig.cmake. CTestConfig.cmake is specific to the internal testing
- performed by The HDF Group. It should be altered for the user's
+ 5. The files that support building with CMake are all of the files in the
+ config/cmake folder, the CMakeLists.txt files in each source folder, and
+ CTestConfig.cmake. CTestConfig.cmake is specific to the internal testing
+ performed by The HDF Group. It should be altered for the user's
installation and needs. The cacheinit.cmake file settings are used by
- The HDF Group for daily testing. It should be altered/ignored for the user's
- installation and needs.
+ The HDF Group for daily testing. It should be altered/ignored for the user's
+ installation and needs.
@@ -175,22 +175,24 @@ These steps are described in more detail below.
III. Minimum C Project Files for CMake
========================================================================
-Create a CMakeLists.txt file at the source root. Include the
+Create a CMakeLists.txt file at the source root. Include the
following text in the file:
##########################################################
-cmake_minimum_required (VERSION 2.8.12)
+cmake_minimum_required (VERSION 3.1)
PROJECT (HDF4MyApp C CXX)
find_package (HDF4 NAMES hdf4)
# find_package (HDF4) # Find non-cmake built HDF4
INCLUDE_DIRECTORIES (${HDF4_INCLUDE_DIR})
set (LINK_LIBS ${LINK_LIBS} ${HDF4_LIBRARIES})
+set (LIB_TYPE STATIC) # or SHARED
set (example hdf_example)
add_executable (${example} ${PROJECT_SOURCE_DIR}/${example}.c)
-TARGET_C_PROPERTIES (${example} " " " ")
+TARGET_NAMING (${example} ${LIB_TYPE})
+TARGET_C_PROPERTIES (${example} ${LIB_TYPE} " " " ")
target_link_libraries (${example} ${LINK_LIBS})
ENABLE_TESTING ()
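The CMakeLists.txt above builds a single source file, hdf_example.c, which the
release notes do not include. The following is only a rough sketch of what such
a file could contain, assuming the HDF4 SD interface; the file name
"example.hdf", the dataset name, and the 2x3 integer array are illustrative,
not taken from the HDF4 sources.

/* hdf_example.c -- illustrative sketch only; not part of the HDF4 distribution.
 * Uses the HDF4 SD interface (mfhdf.h) to write a small 2-D integer dataset
 * to a new file. File and dataset names are arbitrary. */
#include <stdio.h>
#include "mfhdf.h"

int main(void)
{
    int32 sd_id, sds_id;
    int32 dims[2]  = {2, 3};                    /* rank-2 dataset, 2 x 3 */
    int32 start[2] = {0, 0};
    int32 data[2][3] = { {1, 2, 3}, {4, 5, 6} };

    /* Create (or truncate) the HDF file and open the SD interface. */
    sd_id = SDstart("example.hdf", DFACC_CREATE);
    if (sd_id == FAIL) {
        fprintf(stderr, "SDstart failed\n");
        return 1;
    }

    /* Create a 32-bit integer dataset and write the whole array. */
    sds_id = SDcreate(sd_id, "example_data", DFNT_INT32, 2, dims);
    if (sds_id == FAIL ||
        SDwritedata(sds_id, start, NULL, dims, (VOIDP)data) == FAIL) {
        fprintf(stderr, "writing dataset failed\n");
        SDend(sd_id);
        return 1;
    }

    SDendaccess(sds_id);
    SDend(sd_id);
    printf("wrote example.hdf\n");
    return 0;
}

Built with the project file above, the resulting executable can then be
exercised through the ctest step described earlier.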
@@ -213,10 +215,10 @@ Windows should adjust the forward slash to double backslashes, except for
the HDF_DIR environment variable.
NOTE: these files are available at the HDF web site:
- http://www.hdfgroup.org/release4/cmakebuild.html
+ https://www.hdfgroup.org/release4/cmakebuild.html
CTestScript.cmake
-
+
HDF4ExamplesWindowsbinaryCMake.cmake
@@ -226,13 +228,13 @@ ctest
========================================================================
############################################################################
-# Product specific script, HDF4Example.cmake, that uses the
+# Product specific script, HDF4Example.cmake, that uses the
# CTestScript.cmake file (see Appendix in the CMake.txt). Usage:
# "ctest -S HDF4Example.cmake,hdf4Examples -C Release -O hdf4EX.log"
# where hdf4Examples is the source folder relative to the location of these scripts
############################################################################
-cmake_minimum_required(VERSION 2.8.12 FATAL_ERROR)
+cmake_minimum_required(VERSION 3.1 FATAL_ERROR)
set(CTEST_DASHBOARD_ROOT ${CTEST_SCRIPT_DIRECTORY})
set(CTEST_SOURCE_NAME ${CTEST_SCRIPT_ARG})
@@ -254,12 +256,12 @@ set(SITE_COMPILER_NAME "compiler name")
set(SITE_COMPILER_VERSION "compiler version")
# needed for source updates, change as required
-set(REPOSITORY_URL "http://svn.hdfgroup.uiuc.edu/hdf4-examples/trunk")
+set(REPOSITORY_URL "https://svn.hdfgroup.org/hdf4-examples/trunk")
set(ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DCMAKE_INSTALL_PREFIX:PATH=/usr/local/hdf4Examples-4.2")
# location of the installed hdf4 (cmake configuration folder)
-set(ENV{HDF4_DIR} "/usr/share/cmake/hdf4")
+set(ENV{HDF4_DIR} "/usr/share/cmake")
include(${CTEST_SCRIPT_DIRECTORY}/CTestScript.cmake)
diff --git a/release_notes/USING_HDF4_VS.txt b/release_notes/USING_HDF4_VS.txt
index 829648a..7b68ed5 100644
--- a/release_notes/USING_HDF4_VS.txt
+++ b/release_notes/USING_HDF4_VS.txt
@@ -76,11 +76,11 @@ Using Visual Studio 2008 with HDF4 Libraries built with Visual Studio 2008
Many other common questions and hints are located online and being updated
in the HDF4 FAQ. For Windows-specific questions, please see:
- http://www.hdfgroup.org/windows/faq.html
+ https://www.hdfgroup.org/windows/faq.html
For all other general questions, you can look in the general FAQ:
- http://hdfgroup.org/HDF4-FAQ.html
+ https://hdfgroup.org/HDF4-FAQ.html
************************************************************************
Please send email to help at hdfgroup.org for further assistance.
diff --git a/release_notes/misc_docs.txt b/release_notes/misc_docs.txt
index 9b25af2..09f749f 100644
--- a/release_notes/misc_docs.txt
+++ b/release_notes/misc_docs.txt
@@ -333,7 +333,7 @@ datasets are. The "rules" for writing to a compressed dataset are as follows:
compressed data which is not stored in "chunked" form. This is due to
compression algorithms not being suitable for "local" modifications in a
compressed datastream. Please send questions about compression to the
-general HDF support e-mail address: hdfhelp at ncsa.uiuc.edu
+general HDF support e-mail address: help at hdfgroup.org
Compression for HDF SDS
The SDsetcompress and SDsetnbitdataset functions are used as
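As a rough illustration of the SDsetcompress call named above (a sketch only,
not taken from the release notes): it must be issued before any data is written
to the dataset, and here deflate/GZIP compression is assumed, with an arbitrary
level of 6 and sds_id assumed to come from SDcreate.

/* Illustrative sketch only: enable deflate ("gzip") compression on an SDS.
 * sds_id is assumed to come from SDcreate(); the level 6 is arbitrary. */
#include "mfhdf.h"

static int compress_sds(int32 sds_id)
{
    comp_info c_info;                    /* compression parameters union   */
    c_info.deflate.level = 6;            /* deflate level, 1 (fast)..9     */

    /* Must be called before any data is written to the dataset. */
    if (SDsetcompress(sds_id, COMP_CODE_DEFLATE, &c_info) == FAIL)
        return -1;
    return 0;
}

SDsetnbitdataset follows a similar pattern but takes bit-field arguments
(start bit, bit length, sign extension, fill-one flags) rather than a
comp_info structure.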
@@ -870,7 +870,7 @@ policy:
8. A new utility will be written to convert "DimVal0.1" to "DimVal0.0"
for special cases.
-Please send bug reports, comments and suggestions to hdfhelp at ncsa.uiuc.edu.
+Please send bug reports, comments and suggestions to help at hdfgroup.org.
============================================================================
@@ -1476,12 +1476,12 @@ First-time HDF users are encouraged to read the FAQ in this release for
more information about HDF. Users can also look at the home page for HDF
at:
- http://hdf.ncsa.uiuc.edu/
+ https://www.hdfgroup.org/
Please send questions, comments, and recommendations regarding the
Macintosh version of the HDF library to:
- hdfhelp at ncsa.uiuc.edu
+ help at hdfgroup.org
============================================================================
@@ -3515,11 +3515,11 @@ First-time HDF users are encouraged to read the FAQ in this release for
more information about HDF. Users can also look at the home page for HDF
at:
- http://hdf.ncsa.uiuc.edu/
+ https://www.hdfgroup.org/
Please send questions, comments, and recommendations regarding the Windows
version of the HDF library to:
- hdfhelp at ncsa.uiuc.edu
+ help at hdfgroup.org
============================================================================
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/pkg-grass/hdf4.git