[nco] 01/05: Imported Upstream version 4.6.3
Bas Couwenberg
sebastic at debian.org
Fri Dec 23 22:55:36 UTC 2016
This is an automated email from the git hooks/post-receive script.
sebastic pushed a commit to branch master
in repository nco.
commit 883e0aed0af76010d47166341a8b06a669863529
Author: Bas Couwenberg <sebastic at xs4all.nl>
Date: Fri Dec 23 23:28:41 2016 +0100
Imported Upstream version 4.6.3
---
CMakeLists.txt | 164 +++++++
bld/Makefile | 14 +
bld/nco.spec | 11 +-
bld/nco_dst.pl | 30 +-
bm/NCO_rgr.pm | 83 +++-
configure | 20 +-
configure.ac | 2 +-
configure.eg | 21 +-
data/in.cdl | 2 +-
data/in_4.cdl | 14 +-
data/ncclimo | 1199 ++++++++++++++++++++++++++-------------------
data/ncremap | 172 ++++---
data/{ncclimo => ncsplit} | 627 +++++++++++++-----------
data/tst-udunits.nco | 136 +++++
debian/changelog | 6 +
debian/doc-base | 2 +-
doc/ANNOUNCE | 182 ++++---
doc/ChangeLog | 147 +++++-
doc/MANIFEST | 12 +-
doc/README | 2 +-
doc/TODO | 7 +-
doc/VERSION | 2 +-
doc/debian.txt | 162 +++---
doc/highlights_old.shtml | 2 +-
doc/index.shtml | 100 ++--
doc/nco.texi | 716 +++++++++++++++++++++------
doc/nco_news.shtml | 18 +-
doc/nco_src_frg.txt | 2 +-
doc/xmp_cesm.html | 2 +-
man/ncap.1 | 4 +-
man/ncap2.1 | 6 +-
man/ncatted.1 | 4 +-
man/ncbo.1 | 4 +-
man/ncclimo.1 | 10 +-
man/ncecat.1 | 4 +-
man/nces.1 | 4 +-
man/ncflint.1 | 4 +-
man/ncks.1 | 8 +-
man/nco.1 | 6 +-
man/ncpdq.1 | 4 +-
man/ncra.1 | 4 +-
man/ncrcat.1 | 4 +-
man/ncremap.1 | 4 +-
man/ncrename.1 | 6 +-
man/ncwa.1 | 4 +-
src/nco++/Makefile.old | 45 +-
src/nco++/fmc_all_cls.cc | 423 +++++++++++-----
src/nco++/fmc_all_cls.hh | 11 +
src/nco++/fmc_gsl_cls.cc | 40 +-
src/nco++/fmc_gsl_cls.hh | 18 +-
src/nco++/ncap2.cc | 12 +-
src/nco++/ncap2_utl.cc | 37 ++
src/nco++/ncap2_utl.hh | 7 +
src/nco++/prs_cls.cc | 25 +-
src/nco/Makefile.am | 2 +
src/nco/Makefile.in | 14 +-
src/nco/mpncbo.c | 1 +
src/nco/mpncecat.c | 1 +
src/nco/mpncflint.c | 1 +
src/nco/mpncpdq.c | 1 +
src/nco/mpncra.c | 1 +
src/nco/mpncwa.c | 3 +-
src/nco/ncap.c | 1 +
src/nco/ncap_lex.l | 1 +
src/nco/ncap_yacc.y | 8 +
src/nco/ncbo.c | 3 +-
src/nco/ncecat.c | 3 +-
src/nco/ncflint.c | 3 +-
src/nco/ncks.c | 33 +-
src/nco/nco.h | 15 +-
src/nco/nco_att_utl.c | 2 +-
src/nco/nco_cln_utl.c | 829 +++++++++++++++++--------------
src/nco/nco_cln_utl.h | 95 +++-
src/nco/nco_cnk.c | 21 +
src/nco/nco_cnk.h | 1 +
src/nco/nco_ctl.c | 8 +-
src/nco/nco_fl_utl.c | 2 +-
src/nco/nco_lmt.c | 63 ++-
src/nco/nco_netcdf.c | 5 +-
src/nco/nco_netcdf.h | 3 +
src/nco/nco_pck.c | 2 +-
src/nco/nco_prn.c | 87 +++-
src/nco/nco_sng_utl.h | 7 +
src/nco/ncpdq.c | 3 +-
src/nco/ncra.c | 28 +-
src/nco/ncwa.c | 5 +-
86 files changed, 3865 insertions(+), 1942 deletions(-)
diff --git a/CMakeLists.txt b/CMakeLists.txt
new file mode 100644
index 0000000..c2abe08
--- /dev/null
+++ b/CMakeLists.txt
@@ -0,0 +1,164 @@
+#CMake build, Pedro Vicente
+#options
+#NETCDF_INCLUDE
+#NETCDF_LIBRARY
+#HDF5_LIBRARY
+#HDF5_HL_LIBRARY
+#ZLIB_LIBRARY
+#SZIP_LIBRARY
+#CURL_LIBRARY
+
+#Logic for finding header files and library dependencies:
+#find_path and find_library are used (with optional location hints);
+#if the file is found, the first argument variable holds its location
+#and the message "-- Found file at location" is printed.
+#If the file is not found, the first argument variable falls back to the
+#user option that contains the full path to the file location;
+#the same "-- Found file at location" message is printed,
+#but no attempt is made at this time to validate the user-supplied file (.h or .a)
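+#Example out-of-tree invocation (a sketch; paths are illustrative, not prescriptive):
+#  mkdir build; cd build
+#  cmake /path/to/nco -DNETCDF_INCLUDE=/usr/include -DNETCDF_LIBRARY=/usr/lib/libnetcdf.so
+#  make
+#Any of the cache variables listed above may be preset the same way;
+#variables left unset fall back to the find_path/find_library searches below.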
+
+cmake_minimum_required(VERSION 2.8)
+project (nco)
+set(BUILD_SHARED_LIBRARIES OFF)
+
+find_path(NETCDF_INCLUDE_FOUND netcdf.h HINTS ${NETCDF_INCLUDE} "/usr/include")
+if(NOT NETCDF_INCLUDE_FOUND)
+ message(FATAL_ERROR "netcdf.h header file not found")
+else()
+ message("-- Found netcdf.h header file at: " ${NETCDF_INCLUDE_FOUND})
+endif()
+
+find_library(NETCDF_LIBRARY NAMES netcdf)
+if(NOT NETCDF_LIBRARY)
+ message(FATAL_ERROR "netcdf library not found")
+else()
+ message("-- Found netcdf library at: " ${NETCDF_LIBRARY})
+endif()
+
+find_library(HDF5_LIBRARY NAMES hdf5)
+if(NOT HDF5_LIBRARY)
+ message(FATAL_ERROR "hdf5 library not found")
+else()
+ message("-- Found hdf5 library at: " ${HDF5_LIBRARY})
+endif()
+
+find_library(HDF5_HL_LIBRARY NAMES hdf5_hl)
+if(NOT HDF5_HL_LIBRARY)
+ message(FATAL_ERROR "hdf5 high level library not found")
+else()
+ message("-- Found hdf5 high level library at: " ${HDF5_HL_LIBRARY})
+endif()
+
+find_library(ZLIB_LIBRARY NAMES z)
+if(NOT ZLIB_LIBRARY)
+ message(FATAL_ERROR "zlib library not found")
+else()
+ message("-- Found zlib library at: " ${ZLIB_LIBRARY})
+endif()
+
+find_library(SZIP_LIBRARY NAMES sz)
+if(NOT SZIP_LIBRARY)
+ message(FATAL_ERROR "szip library not found")
+else()
+ message("-- Found szip library at: " ${SZIP_LIBRARY})
+endif()
+
+find_library(CURL_LIBRARY NAMES curl)
+if(NOT CURL_LIBRARY)
+ message(FATAL_ERROR "curl library not found")
+else()
+ message("-- Found curl library at: " ${CURL_LIBRARY})
+endif()
+
+if (MSVC)
+ add_definitions( "/D_CRT_SECURE_NO_WARNINGS" )
+ if (STATIC_CRT)
+ message("-- Building with static runtime library")
+ set(CMAKE_CXX_FLAGS_DEBUG "/D_DEBUG /MTd /Zi /Ob0 /Od /RTC1")
+ set(CMAKE_CXX_FLAGS_MINSIZEREL "/MT /O1 /Ob1 /D NDEBUG")
+ set(CMAKE_CXX_FLAGS_RELEASE "/MT /O2 /Ob2 /D NDEBUG")
+ set(CMAKE_CXX_FLAGS_RELWITHDEBINFO "/MT /Zi /O2 /Ob1 /D NDEBUG")
+ endif()
+ set(WINSOCK_LIBRARY ws2_32.lib winmm.lib wldap32.lib)
+endif(MSVC)
+
+if (CMAKE_VERSION VERSION_LESS "3.1")
+ if (CMAKE_C_COMPILER_ID STREQUAL "GNU")
+ set (CMAKE_C_FLAGS "--std=gnu99 ${CMAKE_C_FLAGS}")
+ endif ()
+else ()
+ set (CMAKE_C_STANDARD 99)
+endif ()
+
+include_directories(${NETCDF_INCLUDE_FOUND})
+
+set(LIB_DEP ${LIB_DEP} ${NETCDF_LIBRARY} ${HDF5_HL_LIBRARY} ${HDF5_LIBRARY})
+set(LIB_DEP ${LIB_DEP} ${CURL_LIBRARY} ${ZLIB_LIBRARY} ${SZIP_LIBRARY})
+set(LIB_DEP ${LIB_DEP} ${WINSOCK_LIBRARY} m)
+
+set(SRC ${SRC} src/nco/libnco.h)
+set(SRC ${SRC} src/nco/nco_att_utl.c src/nco/nco_att_utl.h)
+set(SRC ${SRC} src/nco/nco_aux.c src/nco/nco_aux.h)
+set(SRC ${SRC} src/nco/nco_bnr.c src/nco/nco_bnr.h)
+set(SRC ${SRC} src/nco/nco_cln_utl.c src/nco/nco_cln_utl.h)
+set(SRC ${SRC} src/nco/nco_cnf_dmn.c src/nco/nco_cnf_dmn.h)
+set(SRC ${SRC} src/nco/nco_cnf_typ.c src/nco/nco_cnf_typ.h)
+set(SRC ${SRC} src/nco/nco_cnk.c src/nco/nco_cnk.h)
+set(SRC ${SRC} src/nco/nco_cnv_arm.c src/nco/nco_cnv_arm.h)
+set(SRC ${SRC} src/nco/nco_cnv_csm.c src/nco/nco_cnv_csm.h)
+set(SRC ${SRC} src/nco/nco_ctl.c src/nco/nco_ctl.h)
+set(SRC ${SRC} src/nco/nco_dbg.c src/nco/nco_dbg.h)
+set(SRC ${SRC} src/nco/nco_dmn_utl.c src/nco/nco_dmn_utl.h)
+set(SRC ${SRC} src/nco/nco_fl_utl.c src/nco/nco_fl_utl.h)
+set(SRC ${SRC} src/nco/nco_getopt.c src/nco/nco_getopt.h)
+set(SRC ${SRC} src/nco/nco_grp_trv.c src/nco/nco_grp_trv.h)
+set(SRC ${SRC} src/nco/nco_grp_utl.c src/nco/nco_grp_utl.h)
+set(SRC ${SRC} src/nco/nco.h)
+set(SRC ${SRC} src/nco/nco_lmt.c src/nco/nco_lmt.h)
+set(SRC ${SRC} src/nco/nco_lst_utl.c src/nco/nco_lst_utl.h)
+set(SRC ${SRC} src/nco/nco_md5.c src/nco/nco_md5.h)
+set(SRC ${SRC} src/nco/nco_mmr.c src/nco/nco_mmr.h)
+set(SRC ${SRC} src/nco/nco_mpi.h)
+set(SRC ${SRC} src/nco/nco_msa.c src/nco/nco_msa.h)
+set(SRC ${SRC} src/nco/nco_mss_val.c src/nco/nco_mss_val.h)
+set(SRC ${SRC} src/nco/nco_mta.c src/nco/nco_mta.h)
+set(SRC ${SRC} src/nco/nco_netcdf.c src/nco/nco_netcdf.h)
+set(SRC ${SRC} src/nco/nco_omp.c src/nco/nco_omp.h)
+set(SRC ${SRC} src/nco/nco_pck.c src/nco/nco_pck.h)
+set(SRC ${SRC} src/nco/nco_ppc.c src/nco/nco_ppc.h)
+set(SRC ${SRC} src/nco/nco_prn.c src/nco/nco_prn.h)
+set(SRC ${SRC} src/nco/nco_rec_var.c src/nco/nco_rec_var.h)
+set(SRC ${SRC} src/nco/nco_rgr.c src/nco/nco_rgr.h)
+set(SRC ${SRC} src/nco/nco_rth_flt.c src/nco/nco_rth_flt.h)
+set(SRC ${SRC} src/nco/nco_rth_utl.c src/nco/nco_rth_utl.h)
+set(SRC ${SRC} src/nco/nco_scl_utl.c src/nco/nco_scl_utl.h)
+set(SRC ${SRC} src/nco/nco_scm.c src/nco/nco_scm.h)
+set(SRC ${SRC} src/nco/nco_sld.c src/nco/nco_sld.h)
+set(SRC ${SRC} src/nco/nco_sng_utl.c src/nco/nco_sng_utl.h)
+set(SRC ${SRC} src/nco/nco_srm.c src/nco/nco_srm.h)
+set(SRC ${SRC} src/nco/nco_typ.h)
+set(SRC ${SRC} src/nco/nco_uthash.h)
+set(SRC ${SRC} src/nco/nco_var_avg.c src/nco/nco_var_avg.h)
+set(SRC ${SRC} src/nco/nco_var_lst.c src/nco/nco_var_lst.h)
+set(SRC ${SRC} src/nco/nco_var_rth.c src/nco/nco_var_rth.h)
+set(SRC ${SRC} src/nco/nco_var_scv.c src/nco/nco_var_scv.h)
+set(SRC ${SRC} src/nco/nco_var_utl.c src/nco/nco_var_utl.h)
+
+add_executable(ncks src/nco/ncks.c ${SRC})
+add_executable(ncbo src/nco/ncbo.c ${SRC})
+add_executable(ncecat src/nco/ncecat.c ${SRC})
+add_executable(ncflint src/nco/ncflint.c ${SRC})
+add_executable(ncpdq src/nco/ncpdq.c ${SRC})
+add_executable(ncra src/nco/ncra.c ${SRC})
+add_executable(ncrename src/nco/ncrename.c ${SRC})
+
+target_link_libraries (ncks ${LIB_DEP})
+target_link_libraries (ncbo ${LIB_DEP})
+target_link_libraries (ncecat ${LIB_DEP})
+target_link_libraries (ncflint ${LIB_DEP})
+target_link_libraries (ncpdq ${LIB_DEP})
+target_link_libraries (ncra ${LIB_DEP})
+target_link_libraries (ncrename ${LIB_DEP})
+
+
diff --git a/bld/Makefile b/bld/Makefile
index 2afd758..8789994 100644
--- a/bld/Makefile
+++ b/bld/Makefile
@@ -922,7 +922,20 @@ endif
ifneq (${null},$(findstring ${PVM_ARCH},LINUXALPHALINUXAMD64LINUXARMFREEBSDMACOSX))
# 20161001 Linux ar with Ubuntu Xenial began using deterministic mode. -U undoes that.
# 20161001 GNU ar -s is equivalent to ranlib
+# ar -D: Operate in deterministic mode (breaks NCO build on Ubuntu)
+# ar -r: replace existing or insert new file(s) into the archive
+# ar -s: equivalent to ranlib
+# ar -U: Do not operate in deterministic mode. This is the inverse of the D modifier, above: added files and the archive index will get their actual UID, GID, timestamp, and file mode values.
+# ar -U: Not available in the RHEL 6 (2010) ar used on Rhea and Titan
+# ar -v: be verbose
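+# Illustrative invocations of the two flag sets chosen below (archive and object names are hypothetical):
+#   ar rsUv libnco.a nco_att_utl.o # non-deterministic mode: record real UID, GID, and timestamps
+#   ar rsv  libnco.a nco_att_utl.o # fallback for rhea/titan/yellowstone, whose older GNU ar lacks -U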
ARFLAGS := rsUv
+ifneq (${null},$(findstring rhea,${HOSTNAME}))
+ ARFLAGS := rsv
+else ifneq (${null},$(findstring titan,${HOSTNAME}))
+ ARFLAGS := rsv
+else ifneq (${null},$(findstring ys,${HOSTNAME}))
+ ARFLAGS := rsv
+endif # !rhea,titan,yellowstone
CXX := ${LINUX_CXX}
CC := ${LINUX_CC}
CPP := ${CC}
@@ -1997,6 +2010,7 @@ debug: dbg
dbg:
@printf "ABI = ${ABI}\n"
@printf "AR = ${AR}\n"
+ @printf "ARFLAGS = ${ARFLAGS}\n"
@printf "BNR_SFX = ${BNR_SFX}\n"
@printf "CC = ${CC}\n"
@printf "CCACHE = ${CCACHE}\n"
diff --git a/bld/nco.spec b/bld/nco.spec
index e52f89e..585e5f8 100644
--- a/bld/nco.spec
+++ b/bld/nco.spec
@@ -2,17 +2,17 @@
# http://cvs.fedoraproject.org/viewvc/devel/nco/nco.spec?view=co
Name: nco
-Version: 4.6.2
+Version: 4.6.3
Release: 1%{?dist}
Summary: Programs that manipulate netCDF files
Group: Applications/Engineering
License: GPL3
URL: http://nco.sf.net/
-# Obtain NCO version 4.6.2-1 tar.gz from Sourceforge using CVS:
+# Obtain NCO version 4.6.3-1 tar.gz from Sourceforge using CVS:
# cvs -d:pserver:anonymous at nco.cvs.sf.net:/cvsroot/nco login
-# cvs -z3 -d:pserver:anonymous at nco.cvs.sf.net:/cvsroot/nco co -r nco-4.6.2-1 -d nco-%{version} nco
-# tar czf nco-%{version}.tar.gz --exclude='nco-4.6.2/debian*' --exclude='.cvsignore' --exclude=ncap_lex.c --exclude='ncap_yacc.[ch]' ./nco-%{version}
+# cvs -z3 -d:pserver:anonymous at nco.cvs.sf.net:/cvsroot/nco co -r nco-4.6.3-1 -d nco-%{version} nco
+# tar czf nco-%{version}.tar.gz --exclude='nco-4.6.3/debian*' --exclude='.cvsignore' --exclude=ncap_lex.c --exclude='ncap_yacc.[ch]' ./nco-%{version}
Source0: nco-%{version}.tar.gz
#Patch0: nco_install_C_headers.patch
#Patch1: nco_find_udunits-dat.patch
@@ -108,6 +108,9 @@ fi
# %{_libdir}/libnco++.so
%changelog
+* Fri Dec 23 2016 Charlie Zender <zender at uci.edu> - 4.6.3-1
+- new upstream 4.6.3
+
* Thu Nov 16 2016 Charlie Zender <zender at uci.edu> - 4.6.2-1
- new upstream 4.6.2
diff --git a/bld/nco_dst.pl b/bld/nco_dst.pl
index 8dab0c0..b8e1c77 100755
--- a/bld/nco_dst.pl
+++ b/bld/nco_dst.pl
@@ -5,20 +5,20 @@
# Usage:
# Export tagged, public versions
-# /usr/bin/scp ${DATA}/nco-4.6.2.tar.gz zender,nco at web.sf.net:/home/project-web/nco/htdocs/src
-
-# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --bld --cln nco-4.6.2 # Build, do not release on SF
-# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --bld --cln --sf nco-4.6.2 # Build, release on SF
-# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --cln --nst_all nco-4.6.2 # Install, do not build
-# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --bld --cln --nst_all nco-4.6.2 # Build and install
-# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --cln --acd_cnt nco-4.6.2
-# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --cln --acd_prs nco-4.6.2
-# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --cln --cgd_cnt nco-4.6.2
-# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --cln --cray_prs nco-4.6.2
-# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --cln --bbl_cnt nco-4.6.2
-# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --cln --blk_cnt nco-4.6.2
-# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --cln --dat_cnt nco-4.6.2
-# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --cln --ute_prs nco-4.6.2
+# /usr/bin/scp ${DATA}/nco-4.6.3.tar.gz zender,nco at web.sf.net:/home/project-web/nco/htdocs/src
+
+# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --bld --cln nco-4.6.3 # Build, do not release on SF
+# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --bld --cln --sf nco-4.6.3 # Build, release on SF
+# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --cln --nst_all nco-4.6.3 # Install, do not build
+# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --bld --cln --nst_all nco-4.6.3 # Build and install
+# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --cln --acd_cnt nco-4.6.3
+# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --cln --acd_prs nco-4.6.3
+# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --cln --cgd_cnt nco-4.6.3
+# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --cln --cray_prs nco-4.6.3
+# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --cln --bbl_cnt nco-4.6.3
+# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --cln --blk_cnt nco-4.6.3
+# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --cln --dat_cnt nco-4.6.3
+# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --cln --ute_prs nco-4.6.3
# Export daily snapshot
# ${HOME}/nco/bld/nco_dst.pl --dbg=2
@@ -256,7 +256,7 @@ if($bld){
# Set up FTP server
chdir $dst_pth_pfx or die "$prg_nm: ERROR unable to chdir to $dst_pth_pfx: $!\n"; # $! is system error string
cmd_prc("$cp_cmd $doc_fl ./$dst_vrs/doc"); # Copy derived documentation to source directory
- cmd_prc("$tar_cmd cvzf $dst_fl --exclude='nco-4.6.2/debian*' --exclude='.cvsignore' --exclude=ncap_lex.c --exclude=ncap_yacc.[ch] ./$dst_vrs"); # Create gzipped tarfile
+ cmd_prc("$tar_cmd cvzf $dst_fl --exclude='nco-4.6.3/debian*' --exclude='.cvsignore' --exclude=ncap_lex.c --exclude=ncap_yacc.[ch] ./$dst_vrs"); # Create gzipped tarfile
cmd_prc("$rsh_cmd $www_mch $rm_cmd $www_drc/src/$dst_fl"); # Remove any distribution with same name
if($dly_snp){cmd_prc("$rsh_cmd $www_mch $rm_cmd -r $www_drc/src/nco-????????.tar.gz");} # Remove previous daily snapshots from WWW server
cmd_prc("$rcp_cmd $dst_fl $www_mch:$www_drc/src"); # Copy local tarfile to WWW server
diff --git a/bm/NCO_rgr.pm b/bm/NCO_rgr.pm
index 99b8481..4db5e4a 100644
--- a/bm/NCO_rgr.pm
+++ b/bm/NCO_rgr.pm
@@ -442,6 +442,15 @@ if($USER eq 'zender'){
NCO_bm::tst_run(\@tst_cmd);
$#tst_cmd=0; # Reset array
+ $dsc_sng="Run script to test udunits code";
+ $tst_cmd[0]="ncap2 -h -O $fl_fmt $nco_D_flg -v -S '../data/tst-udunits.nco' $in_pth_arg in.nc %tmp_fl_00%";
+ $tst_cmd[1]="ncks -C -H -v nbr_err -s '%d' %tmp_fl_00%";
+ $tst_cmd[2]="0";
+ $tst_cmd[3]="SS_OK";
+ NCO_bm::tst_run(\@tst_cmd);
+ $#tst_cmd=0; # Reset array
+
+
if($dodap eq "FALSE"){
@@ -3466,7 +3475,8 @@ if($RUN_NETCDF4_TESTS_VERSION_GE_431){
#ncpdq #41
#ncpdq -O -a -lat -g g23,g24 ~/nco/data/in_grp_3.nc ~/foo.nc
- $dsc_sng="(Groups) Reverse -a lat -g g24 several groups";
+#ncks -v lat -g g24 ~/foo.nc
+ $dsc_sng="(Groups) Reverse (-a -lat) several groups (-g g23,g24)";
$tst_cmd[0]="ncpdq $omp_flg $fl_fmt $nco_D_flg -O -a -lat -g g23,g24 $in_pth_arg in_grp_3.nc %tmp_fl_00%";
$tst_cmd[1]="ncks -v lat -g g24 %tmp_fl_00%";
$tst_cmd[2]="lat[1]=-60 degrees_north";
@@ -3475,9 +3485,9 @@ if($RUN_NETCDF4_TESTS_VERSION_GE_431){
$#tst_cmd=0; # Reset array
#ncpdq #42
-#ncpdq -O -v lat,lon -a -lat,-lon -g g1,g2 ~/nco/data/in_grp_8.nc out1.nc
-#ncks -H out1.nc
- $dsc_sng="(Groups) Reverse -a -lat,-lon several groups";
+#ncpdq -O -v lat,lon -a -lat,-lon -g g1,g2 ~/nco/data/in_grp_8.nc ~/foo.nc
+#ncks -H -v lon -g g2 ~/foo.nc
+ $dsc_sng="(Groups) Reverse (-a -lat,-lon) several groups (-g g1,g2)";
$tst_cmd[0]="ncpdq $omp_flg $fl_fmt $nco_D_flg -O -a -lat,-lon -g g1,g2 $in_pth_arg in_grp_8.nc %tmp_fl_00%";
$tst_cmd[1]="ncks -H -v lon -g g2 %tmp_fl_00%";
$tst_cmd[2]="lon[2]=0";
@@ -3486,9 +3496,9 @@ if($RUN_NETCDF4_TESTS_VERSION_GE_431){
$#tst_cmd=0; # Reset array
#ncpdq #43
-#ncpdq -O -v lat,lon -a lat,-lon -g g1,g2 ~/nco/data/in_grp_8.nc out1.nc
-#ncks -H out1.nc
- $dsc_sng="(Groups) Reverse -a lat,-lon several groups";
+#ncpdq -O -v lat,lon -a lat,-lon -g g1,g2 ~/nco/data/in_grp_8.nc ~/foo.nc
+#ncks -H -v lon -g g2 ~/foo.nc
+ $dsc_sng="(Groups) Reverse (-a lat,-lon) several groups (-g g1,g2)";
$tst_cmd[0]="ncpdq $omp_flg $fl_fmt $nco_D_flg -O -a lat,-lon -g g1,g2 $in_pth_arg in_grp_8.nc %tmp_fl_00%";
$tst_cmd[1]="ncks -H -v lon -g g2 %tmp_fl_00%";
$tst_cmd[2]="lon[2]=0";
@@ -3497,7 +3507,7 @@ if($RUN_NETCDF4_TESTS_VERSION_GE_431){
$#tst_cmd=0; # Reset array
#ncpdq #44
-#ncpdq -O -v lat,lon -a -lat,lon -g g1,g2 ~/nco/data/in_grp_8.nc out1.nc
+#ncpdq -O -v lat,lon -a -lat,lon -g g1,g2 ~/nco/data/in_grp_8.nc ~/foo.nc
#ncks -H out1.nc
$dsc_sng="(Groups) Reverse -a -lat,lon several groups";
$tst_cmd[0]="ncpdq $omp_flg $fl_fmt $nco_D_flg -O -a -lat,lon -g g1,g2 $in_pth_arg in_grp_8.nc %tmp_fl_00%";
@@ -3858,6 +3868,45 @@ if($RUN_NETCDF4_TESTS_VERSION_GE_431){
$tst_cmd[4]="SS_OK";
NCO_bm::tst_run(\@tst_cmd);
$#tst_cmd=0; # Reset array
+
+
+#ncrcat #22
+
+ $tst_cmd[0]="ncap2 -h -O $fl_fmt $nco_D_flg -v -s 'time\@units=\"hours since 1970-01-01\"' $in_pth_arg in.nc %tmp_fl_00%";
+ $tst_cmd[1]="ncap2 -h -O $fl_fmt $nco_D_flg -v -s 'time\@units=\"days since 1970-01-01 10:00:00\"' $in_pth_arg in.nc %tmp_fl_01%";
+ $tst_cmd[2]="ncra -Y ncrcat -O $fl_fmt $nco_D_flg -C -v time %tmp_fl_00% %tmp_fl_01% %tmp_fl_02% 2> %tmp_fl_05%";
+ $tst_cmd[3]="ncap2 -O -v -C -s 'time_ttl=time.total();print(time_ttl)' %tmp_fl_02% %tmp_fl_03%";
+ $tst_cmd[4]="time_ttl = 1475";
+ $dsc_sng="Concatenate 1D variable across two files. [hours file1 - days file2] .Requires UDUnits.";
+ $tst_cmd[5]="SS_OK";
+ NCO_bm::tst_run(\@tst_cmd);
+ $#tst_cmd=0; # Reset array
+
+#ncrcat #23
+
+ $tst_cmd[0]="ncks -O $fl_fmt $nco_D_flg -v time $in_pth_arg in.nc %tmp_fl_00%";
+ $tst_cmd[1]="ncatted -h -O $fl_fmt $nco_D_flg -a units,time,\"kelvin\" %tmp_fl_00%";
+ $tst_cmd[2]="ncra -Y ncrcat -O $fl_fmt $nco_D_flg -C -v time -d time,'-272 Celsius','-270 Celsius' %tmp_fl_00% %tmp_fl_01% 2> %tmp_fl_05%";
+ $tst_cmd[3]="ncap2 -O -v -C -s 'time_ttl=time.total();print(time_ttl)' %tmp_fl_01% %tmp_fl_02%";
+ $tst_cmd[4]="time_ttl = 5";
+ $dsc_sng="Concatenate 1D variable across 1 file with temperature (Celsius) limits";
+ $tst_cmd[5]="SS_OK";
+ NCO_bm::tst_run(\@tst_cmd);
+ $#tst_cmd=0; # Reset array
+
+#ncrcat #24
+
+ $tst_cmd[0]="ncap2 -h -O $fl_fmt $nco_D_flg -v -s 'time\@units=\"days since 2012-01-28\"' $in_pth_arg in.nc %tmp_fl_00%";
+ $tst_cmd[1]="ncap2 $fl_fmt $nco_D_flg -A -v -s 'time\@calendar=\"360_day\"' $in_pth_arg in.nc %tmp_fl_00%";
+ $tst_cmd[2]="ncra -Y ncrcat -O $fl_fmt $nco_D_flg -C -v time -d time,'2012-01-29','2012-02-02' %tmp_fl_00% %tmp_fl_01% 2> %tmp_fl_05%";
+ $tst_cmd[3]="ncap2 -O -v -C -s 'time_ttl=time.total();print(time_ttl)' %tmp_fl_01% %tmp_fl_02%";
+ $tst_cmd[4]="time_ttl = 10";
+ $dsc_sng="Concatenate 1D variable across 1 file. [limits - timstamp day_360 calendar] .Requires UDUnits.";
+ $tst_cmd[5]="SS_OK";
+ NCO_bm::tst_run(\@tst_cmd);
+ $#tst_cmd=0; # Reset array
+
+
#######################################
#### Group tests (requires netCDF4) ###
@@ -3865,7 +3914,7 @@ if($RUN_NETCDF4_TESTS_VERSION_GE_431){
if($RUN_NETCDF4_TESTS){
-#ncrcat #22
+#ncrcat #25
# same as ncrcat #02 with group
$tst_cmd[0]="ncra -Y ncrcat $omp_flg -h -O $fl_fmt $nco_D_flg -g g4 -v one_dmn_rec_var $in_pth_arg in_grp.nc in_grp.nc -d time,2.,3. %tmp_fl_00% 2> %tmp_fl_02%";
@@ -3876,7 +3925,7 @@ if($RUN_NETCDF4_TESTS_VERSION_GE_431){
NCO_bm::tst_run(\@tst_cmd);
$#tst_cmd=0; # Reset array
-#ncrcat #23
+#ncrcat #26
# 2 groups each one with a record (part 1)
# ncra -Y ncrcat -h -O -g g25g1,g25g2 -v one_dmn_rec_var -p ~/nco/data in_grp_3.nc in_grp_3.nc -d time,2.,3. ~/foo.nc
@@ -3888,7 +3937,7 @@ if($RUN_NETCDF4_TESTS_VERSION_GE_431){
NCO_bm::tst_run(\@tst_cmd);
$#tst_cmd=0; # Reset array
-#ncrcat #24
+#ncrcat #27
# 2 groups each one with a record (part 2)
# ncra -Y ncrcat -h -O -g g25g1,g25g2 -v one_dmn_rec_var -p ~/nco/data in_grp_3.nc in_grp_3.nc -d time,2.,3. ~/foo.nc
@@ -3900,7 +3949,7 @@ if($RUN_NETCDF4_TESTS_VERSION_GE_431){
NCO_bm::tst_run(\@tst_cmd);
$#tst_cmd=0; # Reset array
-#ncrcat #25
+#ncrcat #28
#same as #03
$tst_cmd[0]="ncra -Y ncrcat $omp_flg -h -O $fl_fmt $nco_D_flg -v three_dmn_var_dbl $in_pth_arg -d time,,2 -d lat,0,0 -d lon,0,0 -d lon,3,3 in_grp_3.nc in_grp_3.nc %tmp_fl_00% 2> %tmp_fl_02%";
@@ -3914,7 +3963,7 @@ if($RUN_NETCDF4_TESTS_VERSION_GE_431){
} #### Group tests
-# ncrcat #26
+# ncrcat #29
# Detect input_complete when stride skips user-specified idx_end_max
# ncrcat -O -C -v time -d time,0,10,9,1 -p ~/nco/data in.nc in.nc ~/foo.nc
# ncks -C -H -s '%g, ' -v time ~/foo.nc
@@ -3926,7 +3975,7 @@ if($RUN_NETCDF4_TESTS_VERSION_GE_431){
NCO_bm::tst_run(\@tst_cmd);
$#tst_cmd=0; # Reset array
-# ncrcat #27
+# ncrcat #30
# Another detect input_complete when stride skips user-specified idx_end_max
# ncks -O -C -v time -d time,0,2 ~/nco/data/in.nc ~/foo1.nc
# ncks -O -C -v time -d time,3,5 ~/nco/data/in.nc ~/foo2.nc
@@ -3948,7 +3997,7 @@ if($RUN_NETCDF4_TESTS_VERSION_GE_431){
if($RUN_NETCDF4_TESTS){
-#ncrcat #28
+#ncrcat #31
#ncks -h -O -g g4 -v one_dmn_rec_var ~/nco/data/in_grp.nc in_grp1.nc
#ncrcat -h --no_tmp_fl --rec_apn -v one_dmn_rec_var ~/nco/data/in_grp.nc in_grp1.nc
@@ -3961,7 +4010,7 @@ if($RUN_NETCDF4_TESTS_VERSION_GE_431){
NCO_bm::tst_run(\@tst_cmd);
$#tst_cmd=0; # Reset array
-#ncrcat #29
+#ncrcat #32
#ncks -h -O -g g5 -v one_dmn_rec_var,time51,time52 ~/nco/data/in_grp.nc in_grp1.nc
#ncrcat -h --no_tmp_fl --rec_apn -g g5 -v one_dmn_rec_var ~/nco/data/in_grp.nc in_grp1.nc
@@ -3974,7 +4023,7 @@ if($RUN_NETCDF4_TESTS_VERSION_GE_431){
NCO_bm::tst_run(\@tst_cmd);
$#tst_cmd=0; # Reset array
-#ncrcat #30
+#ncrcat #33
#ncks -h -O -g g5 -v one_dmn_rec_var,time51,time52 ~/nco/data/in_grp.nc in_grp1.nc
#ncks -h -O -g g5 -v one_dmn_rec_var,time51,time52 ~/nco/data/in_grp.nc in_grp2.nc
#ncrcat -O -h -g g5 -v one_dmn_rec_var -p ~/nco/data in_grp1.nc in_grp2.nc ~/foo.nc
diff --git a/configure b/configure
index e04251b..908da8b 100755
--- a/configure
+++ b/configure
@@ -1,6 +1,6 @@
#! /bin/sh
# Guess values for system-dependent variables and create Makefiles.
-# Generated by GNU Autoconf 2.69 for NCO netCDF Operators 4.6.2.
+# Generated by GNU Autoconf 2.69 for NCO netCDF Operators 4.6.3.
#
# Report bugs to <nco-bugs at lists.sourceforge.net>.
#
@@ -592,8 +592,8 @@ MAKEFLAGS=
# Identity of this package.
PACKAGE_NAME='NCO netCDF Operators'
PACKAGE_TARNAME='nco'
-PACKAGE_VERSION='4.6.2'
-PACKAGE_STRING='NCO netCDF Operators 4.6.2'
+PACKAGE_VERSION='4.6.3'
+PACKAGE_STRING='NCO netCDF Operators 4.6.3'
PACKAGE_BUGREPORT='nco-bugs at lists.sourceforge.net'
PACKAGE_URL=''
@@ -1394,7 +1394,7 @@ if test "$ac_init_help" = "long"; then
# Omit some internal or obsolete options to make the list less imposing.
# This message is too long to be a string in the A/UX 3.1 sh.
cat <<_ACEOF
-\`configure' configures NCO netCDF Operators 4.6.2 to adapt to many kinds of systems.
+\`configure' configures NCO netCDF Operators 4.6.3 to adapt to many kinds of systems.
Usage: $0 [OPTION]... [VAR=VALUE]...
@@ -1465,7 +1465,7 @@ fi
if test -n "$ac_init_help"; then
case $ac_init_help in
- short | recursive ) echo "Configuration of NCO netCDF Operators 4.6.2:";;
+ short | recursive ) echo "Configuration of NCO netCDF Operators 4.6.3:";;
esac
cat <<\_ACEOF
@@ -1633,7 +1633,7 @@ fi
test -n "$ac_init_help" && exit $ac_status
if $ac_init_version; then
cat <<\_ACEOF
-NCO netCDF Operators configure 4.6.2
+NCO netCDF Operators configure 4.6.3
generated by GNU Autoconf 2.69
Copyright (C) 2012 Free Software Foundation, Inc.
@@ -2499,7 +2499,7 @@ cat >config.log <<_ACEOF
This file contains any messages produced by compilers while
running configure, to aid debugging if configure makes a mistake.
-It was created by NCO netCDF Operators $as_me 4.6.2, which was
+It was created by NCO netCDF Operators $as_me 4.6.3, which was
generated by GNU Autoconf 2.69. Invocation command line was
$ $0 $@
@@ -3750,7 +3750,7 @@ fi
# Define the identity of the package.
PACKAGE='nco'
- VERSION='4.6.2'
+ VERSION='4.6.3'
cat >>confdefs.h <<_ACEOF
@@ -21494,7 +21494,7 @@ cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
# report actual input values of CONFIG_FILES etc. instead of their
# values after options handling.
ac_log="
-This file was extended by NCO netCDF Operators $as_me 4.6.2, which was
+This file was extended by NCO netCDF Operators $as_me 4.6.3, which was
generated by GNU Autoconf 2.69. Invocation command line was
CONFIG_FILES = $CONFIG_FILES
@@ -21560,7 +21560,7 @@ _ACEOF
cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`"
ac_cs_version="\\
-NCO netCDF Operators config.status 4.6.2
+NCO netCDF Operators config.status 4.6.3
configured by $0, generated by GNU Autoconf 2.69,
with options \\"\$ac_cs_config\\"
diff --git a/configure.ac b/configure.ac
index 0b8ea7d..d7a5f1e 100644
--- a/configure.ac
+++ b/configure.ac
@@ -51,7 +51,7 @@
# Process configure input with autoconf to produce configure script
# (package name,version,bug-report-address,tarball name)
-AC_INIT([NCO netCDF Operators],[4.6.2],[nco-bugs at lists.sourceforge.net],[nco])
+AC_INIT([NCO netCDF Operators],[4.6.3],[nco-bugs at lists.sourceforge.net],[nco])
# Print GNU copyright in configure script
AC_COPYRIGHT
diff --git a/configure.eg b/configure.eg
index 74de8e6..42295b8 100644
--- a/configure.eg
+++ b/configure.eg
@@ -368,6 +368,24 @@ make check >> nco.make.${GNU_TRP}.foo 2>&1
make install >> nco.make.${GNU_TRP}.foo 2>&1
scp nco.configure.${GNU_TRP}.foo nco.config.log.${GNU_TRP}.foo nco.libtool.${GNU_TRP}.foo nco.make.${GNU_TRP}.foo dust.ess.uci.edu:/var/www/html/nco/rgr
+# gcc/g++ Zender uses this to develop/install/update netCDF4-enabled NCO in personal directories on blues
+# fails to link to udunits...why?
+# nm -a /soft/udunits/2.1.21/lib/libudunits2.a | grep ut_read_xml
+# First use: 20161223 Latest use: 20161223
+# soft add +antlr-2.7.7 # 20161223 this ANTLR appears to be stock and not NCO-compatible
+# soft add +udunits-2.1.21 # 20161223 this UDUnits was compiled without -fPIC so fails with NCO
+soft add +netcdf-4.3.3.1-gnu5.2-serial
+soft add +gsl-2.3
+export GNU_TRP=`~/nco/autobld/config.guess`
+cd ~/nco;/bin/rm -f *.${GNU_TRP}.foo;make distclean
+ANTLR_ROOT=${HOME} CC='gcc' CXX='g++' NETCDF_ROOT='/soft/netcdf_serial/4.3.3.1/gnu-5.2' UDUNITS2_PATH=${HOME} ./configure --prefix=${HOME} --bindir=${MY_BIN_DIR} --datadir=${HOME}/nco/data --libdir=${MY_LIB_DIR} --mandir=${HOME}/nco/man > nco.configure.${GNU_TRP}.foo 2>&1
+/bin/cp -f config.log nco.config.log.${GNU_TRP}.foo
+/bin/cp -f libtool nco.libtool.${GNU_TRP}.foo
+make clean;make > nco.make.${GNU_TRP}.foo 2>&1
+make check >> nco.make.${GNU_TRP}.foo 2>&1
+make install >> nco.make.${GNU_TRP}.foo 2>&1
+scp nco.configure.${GNU_TRP}.foo nco.config.log.${GNU_TRP}.foo nco.libtool.${GNU_TRP}.foo nco.make.${GNU_TRP}.foo dust.ess.uci.edu:/var/www/html/nco/rgr
+
# gcc/g++ Zender uses this to develop/install/update netCDF4-enabled NCO in personal directories on pileus:
export GNU_TRP=`~/nco/autobld/config.guess`
cd ~/nco;/bin/rm -f *.${GNU_TRP}.foo;make distclean
@@ -386,6 +404,7 @@ export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:/glade/apps/opt/netcdf/4.3.0/intel/12.
cd ~/nco/bld;make NETCDF_ROOT='/glade/apps/opt/netcdf/4.3.0/intel/default' UDUNITS_INC='/glade/apps/opt/udunits/2.1.24/intel/12.1.4/include' UDUNITS_LIB='/glade/apps/opt/udunits/2.1.24/intel/12.1.4/lib' OPTS=D allinone;cd -
# gcc/g++ 20160107 Zender uses this to develop/install/update netCDF4-enabled NCO in personal directories on rhea:
+# Val Anantharaj packages these and makes them available with "module load nco/4.6.1"
export LINUX_CC='gcc -std=c99 -pedantic -D_DEFAULT_SOURCE -D_BSD_SOURCE -D_POSIX_SOURCE'
export LINUX_CXX='g++ -std=c++11'
export LINUX_FC='gfortran'
@@ -397,7 +416,7 @@ export NETCDF_ROOT='/sw/redhat6/netcdf/4.3.3.1/rhel6.6_gcc4.8.2--with-dap+hdf4'
export PATH='/sw/redhat6/netcdf/4.3.3.1/rhel6.6_gcc4.8.2--with-dap+hdf4/bin':${PATH}
cd ~/nco/bld;make ANTLR_ROOT=${HOME} NETCDF_ROOT='/sw/redhat6/netcdf/4.3.3.1/rhel6.6_gcc4.8.2--with-dap+hdf4' SZ=Y SZ_LIB='/sw/redhat6/szip/2.1/rhel6.6_gnu4.8.2/lib' UDUNITS_INC='/sw/redhat6/udunits/2.1.24/rhel6.4_intel13.1.3/include' UDUNITS_LIB='/sw/redhat6/udunits/2.1.24/rhel6.4_intel13.1.3/lib' OPTS=D OMP=Y allinone;cd -
-# gcc/g++ 2016420 Zender uses this to develop/install/update netCDF4-enabled NCO in personal directories on titan:
+# gcc/g++ 20160420 Zender uses this to develop/install/update netCDF4-enabled NCO in personal directories on titan:
export LINUX_CC='gcc -std=c99 -pedantic -D_DEFAULT_SOURCE -D_BSD_SOURCE -D_POSIX_SOURCE'
module add gcc # change GCC from v 4.3.4 to 4.9.0
#module add gsl # not used yet
diff --git a/data/in.cdl b/data/in.cdl
index e2f4eac..9e409e9 100644
--- a/data/in.cdl
+++ b/data/in.cdl
@@ -66,7 +66,7 @@ dimensions:
dgn=1,bnd=2,lat=2,lat_grd=3,lev=3,rlev=3,ilev=4,lon=4,lon_grd=5,char_dmn_lng80=80,char_dmn_lng26=26,char_dmn_lng04=4,date_dmn=5,fl_dmn=3,lsmlev=6,wvl=2,time_udunits=3;lon_T42=128,lat_T42=64,lat_times_lon=8,gds_crd=8,gds_ncd=8,vrt_nbr=2,lon_cal=10,lat_cal=10,Lon=4,Lat=2,time=unlimited;
variables:
:Conventions = "CF-1.5";
- :history = "History global attribute.\nTextual attributes like history often have embedded newlines like this.\nSuch newlines should serve as linebreaks on the screen to enhance legibility like this.\nFriendly CDL converters print a single NC_CHAR attribute as a comma-separated list of strings where each embedded delimiter marks a linebreak. This makes poetry embedded in CDL much nicer to read:\n\nA POET by Hafiz\n\nA poet is someone\nWho can pour light into a cup,\nThen raise it to no [...]
+ :history = "History global attribute.\nTextual attributes like history often have embedded newlines like this.\nSuch newlines should serve as linebreaks on the screen to enhance legibility like this.\nFriendly CDL converters print a single NC_CHAR attribute as a comma-separated list of strings where each embedded delimiter marks a linebreak. This makes poetry embedded in CDL much nicer to read (except for the printed literal \\n\'s---those are an eyesore):\n\nA POET by Hafiz\n\nA poet [...]
:lorem_ipsum = "The Lorem Ipsum attribute demonstrates the legibility of text without embedded linebreaks:\nLorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Lady Gaga amat indueris vestimento laetus. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Exce [...]
:julian_day = 200000.04;
:RCS_Header = "$Header$";
diff --git a/data/in_4.cdl b/data/in_4.cdl
index 4f023d4..dee9b64 100644
--- a/data/in_4.cdl
+++ b/data/in_4.cdl
@@ -74,7 +74,10 @@ variables:
string_var2:long_name = "string-type variable with default XML separator";
string string_arr_var(lon);
- string_arr_var:long_name = "arrays of string-types";
+ string_arr_var:long_name = "arrays of string-types";
+
+ string string_greek(lat,lon);
+ string_greek:long_name = "2D array of string-types";
ubyte ubyte_var;
ubyte_var:long_name = "ubyte-type variable";
@@ -539,12 +542,10 @@ variables:
att_var:float_att = 73.0f,72.0f,71.0f,70.010f,69.0010f,68.010000f,67.01000100f;
att_var:double_att = 73.0,72.0,71.0,70.010,69.0010,68.010000,67.01000100;
att_var:ubyte_att = 0ub, 1ub, 2ub, 127ub, 128ub, 254ub, 255ub, 0ub; // 20130712: ncgen 4.3.0 translates this to byte
- att_var:ubyte2_att = '\000','\001','\002','\177','\200','\201','\376','\377'; // 20130712: ncgen 4.3.0 translates this to byte
att_var:ushort_att = 37us;
att_var:uint_att = 73ul; // 20161110: ncgen requires "ul" for uint (a simple "u" is interpreted as uint64)
- att_var:int64_att = 9223372036854775807ll; // 20130712: ncgen 4.3.0 chokes on ll, LL suffix for int64 attributes, and translates -l suffixed numbers to NC_UINT64. Bug reported (netCDF #HEX-795132) and fixed in snapshot 20131117
-// att_var:uint64_att = 18446744073709551615ul; // 20130712: ncgen 4.3.0 chokes on ull, ULL suffix for uint64 attributes. 20131120: XML treats unsigned as signed types, so toolsui produces errors when reading maximum uint64 values (though not when reading maxium ubyte, ushort, and uint values)
- att_var:uint64_att = 0ull;
+// att_var:int64_att = 9223372036854775807ll; // 20130712: ncgen 4.3.0--4.3.3.1 choke on ll, LL suffix for int64 attributes, and translates -l suffixed numbers to NC_UINT64. Bug reported (netCDF #HEX-795132) and fixed in snapshot 20131117
+// att_var:uint64_att = 18446744073709551615ul; // 20130712: ncgen 4.3.0--4.3.3.1 chokes on ull, ULL suffix for uint64 attributes. 20131120: XML treats unsigned as signed types, so toolsui produces errors when reading maximum uint64 values (though not when reading maximum ubyte, ushort, and uint values)
att_var:string_att1 = "20130723: ncgen -k netCDF-4 version 4.0.x converts quoted string to NC_STRING not NC_CHAR. Incompatible with classic behavior.";
att_var:string_att2 = "20130712: ncgen -k netCDF-4 version 4.2.x+ converts quoted string to NC_CHAR not NC_STRING. Backwards compatible with classic behavior.";
string att_var:string_att3 = "20130723: Prefixing full attribute name with \"string\" causes CDL to produce NC_STRING.";
@@ -957,6 +958,9 @@ data:
int64_var=9223372036854775807;
string_var="If you prick us, do we not bleed? If you tickle us, do we not laugh? If you poison us, do we not die? And if you wrong us, shall we not revenge?";
string_arr_var="20130723: Arrays of NC_STRING are simply separated by commas.","If there are commas within a string, well, then, be careful. Did we \"jump the shark\"?","This array comprises three sentences.","Woops, I mean four.";
+
+ string_greek="alpha","beta","gamma","delta","epsilon","zeta","theta","iota";
+
string_var2="20131121: This NC_STRING contains NCO's default XML string separator, *. And it contains multiple sequential copies of that separator, ***. Be sure ncks automatically switches to a different separator, aka the backup separator, the string obtained by removing the two spaces from this:\"* | *\". As of 20131121, ncks handles this correctly, yet toolsui breaks when trying to input the ncks-generated NcML. Appears to be a toolsui bug.";
ubyte_var='z';
diff --git a/data/ncclimo b/data/ncclimo
index 617935d..4d129db 100755
--- a/data/ncclimo
+++ b/data/ncclimo
@@ -33,15 +33,17 @@
# Source: https://github.com/nco/nco/tree/master/data/ncclimo
# Documentation: http://nco.sf.net/nco.html#ncclimo
# Additional Documentation:
-# HowTo: https://acme-climate.atlassian.net/wiki/display/ATM/Generating+Climo+files
+# HowTo: https://acme-climate.atlassian.net/wiki/display/ATM/Generating+and+Regridding+Climatologies+%28climo+files%29+with+NCO+and+ncclimo
# ACME Climatology Requirements: https://acme-climate.atlassian.net/wiki/display/ATM/Climo+Files+-+v0.3+AMIP+runs
# Direct install:
# scp ~/nco/data/ncclimo aims4.llnl.gov:bin
+# scp ~/nco/data/ncclimo blues.lcrc.anl.gov:bin
# scp ~/nco/data/ncclimo cooley.alcf.anl.gov:bin
# scp ~/nco/data/ncclimo cori.nersc.gov:bin_cori
# scp ~/nco/data/ncclimo edison.nersc.gov:bin_edison
# scp ~/nco/data/ncclimo rhea.ccs.ornl.gov:bin_rhea
+# scp ~/nco/data/ncclimo skyglow.ess.uci.edu:bin
# scp ~/nco/data/ncclimo yellowstone.ucar.edu:bin
# scp dust.ess.uci.edu:nco/data/ncclimo ~/bin
@@ -57,6 +59,7 @@ while [ -h "${spt_src}" ]; do # Recursively resolve ${spt_src} until file is no
spt_src="$(readlink "${spt_src}")"
[[ ${spt_src} != /* ]] && spt_src="${drc_spt}/${spt_src}" # If ${spt_src} was relative symlink, resolve it relative to path where symlink file was located
done
+cmd_ln="${spt_src} ${@}"
drc_spt="$( cd -P "$( dirname "${spt_src}" )" && pwd )"
spt_nm=$(basename ${spt_src}) # [sng] Script name (Unlike $0, ${BASH_SOURCE[0]} works well with 'source <script>')
spt_pid=$$ # [nbr] Script PID (process ID)
@@ -73,6 +76,7 @@ fi # HOSTNAME
if [ -z "${DATA}" ]; then
case "${HOSTNAME}" in
constance* | node* ) DATA='/scratch' ; ;; # PNNL
+ blues* | blogin* | b590 ) DATA="/lcrc/project/ACME/${USER}" ; ;; # ALCF blues compute nodes named bNNN, 16|64 cores|GB/node
cooley* | cc* | mira* ) DATA="/projects/HiRes_EarthSys/${USER}" ; ;; # ALCF cooley compute nodes named ccNNN, 384 GB/node
cori* | edison* ) DATA="${SCRATCH}" ; ;; # NERSC cori/edison compute nodes all named nidNNNNN, edison 24|64 cores|GB/node; cori 32|128 cores|GB/node (cori login nodes 512 GB)
pileus* ) DATA="/lustre/atlas/world-shared/cli115/${USER}" ; ;; # OLCF CADES
@@ -86,6 +90,9 @@ case "${HOSTNAME}" in
aims* )
export PATH='/export/zender1/bin'\:${PATH}
export LD_LIBRARY_PATH='/export/zender1/lib'\:${LD_LIBRARY_PATH} ; ;;
+ blues* | blogin* | b590 )
+ export PATH='/home/zender/bin'\:${PATH}
+ export LD_LIBRARY_PATH='/home/zender/lib'\:${LD_LIBRARY_PATH} ; ;;
cooley* )
# 20160421: Split cooley from mira binary locations to allow for different system libraries
# http://www.mcs.anl.gov/hs/software/systems/softenv/softenv-intro.html
@@ -162,9 +169,16 @@ esac # !HOSTNAME
# Incremental climo testing:
# ncclimo -v FSNT,AODVIS -c famipc5_ne30_v0.3_00003 -s 1980 -e 1981 -i ${DATA}/ne30/raw -o ${DATA}/ne30/prv -r ${DATA}/maps/map_ne30np4_to_fv129x256_aave.20150901.nc
# ncclimo -v FSNT,AODVIS -c famipc5_ne30_v0.3_00003 -s 1982 -e 1983 -i ${DATA}/ne30/raw -o ${DATA}/ne30/clm -r ${DATA}/maps/map_ne30np4_to_fv129x256_aave.20150901.nc -x ${DATA}/ne30/prv -X ${DATA}/ne30/xtn -S 1980
+# Binary climo testing:
+# ncclimo -v FSNT,AODVIS -c famipc5_ne30_v0.3_00003 -S 1980 -E 1981 -x ${DATA}/ne30/prv -s 1982 -e 1983 -i ${DATA}/ne30/clm -X ${DATA}/ne30/xtn
+
+# Annual climo testing:
+# ncclimo -C ann -m cism -h h -c b.e10.BG20TRCN.f09_g16.002 -s 1851 -e 2006 -i /lustre/atlas1/cli115/proj-shared/4ue/data/for_charlie -o ${DATA}/ne30/clm
+# ncclimo -C ann -m cism -h h -c b.e10.BG20TRCN.f09_g16.002 -s 1851 -e 1852 -i /lustre/atlas1/cli115/proj-shared/4ue/data/for_charlie -o ${DATA}/ne30/clm > ~/ncclimo.out 2>&1 &
# Debugging and Benchmarking:
# ncclimo -v FSNT,AODVIS -c famipc5_ne30_v0.3_00003 -s 1980 -e 1983 -i ${DATA}/ne30/raw -o ${DATA}/ne30/clm -r ${DATA}/maps/map_ne30np4_to_fv129x256_aave.20150901.nc
+# ncclimo --var_lst=FSNT,AODVIS --caseid=famipc5_ne30_v0.3_00003 --yr_srt=1980 --yr_end=1983 --drc_in=${DATA}/ne30/raw --drc_out=${DATA}/ne30/clm --rgr_map=${DATA}/maps/map_ne30np4_to_fv129x256_aave.20150901.nc
# ncclimo -v TOTEXTTAU -c merra2_198001.nc4 -s 1980 -e 2015 -a sdd -i ${DATA}/merra2/raw -o ${DATA}/merra2/clm
# ncclimo > ~/ncclimo.out 2>&1 &
# ncclimo -c B1850C5e1_ne30 -s 2 -e 199 > ~/ncclimo.out 2>&1 &
@@ -176,11 +190,17 @@ esac # !HOSTNAME
# for fl in `ls hist.*` ; do
# ncatted -O -t -a _FillValue,,o,d,-9.99999979021476795361e+33 ${fl}
# done
+# New MPAS filename conventions (as of ~201612):
+# ncclimo -v timeMonthly_avg_activeTracers_temperature -s 2 -e 3 -m mpaso -i /scratch2/scratchdirs/golaz/ACME_simulations/20161117.beta0.A_WCYCL1850S.ne30_oEC_ICG.edison/run -r ${DATA}/maps/map_oEC60to30_to_t62_bilin.20160301.nc -o ${DATA}/mpas/clm > ~/ncclimo.out 2>&1 &
+ # ncclimo -v timeSeriesStatsMonthly_avg_iceAreaCell_1 -s 2 -e 3 -m mpascice -i /scratch2/scratchdirs/golaz/ACME_simulations/20161117.beta0.A_WCYCL1850S.ne30_oEC_ICG.edison/run -r ${DATA}/maps/map_oEC60to30_to_t62_bilin.20160301.nc -o ${DATA}/mpas/clm > ~/ncclimo.out 2>&1 &
+# Old MPAS filename conventions (until ~201609):
# ncclimo -v temperature -c hist -s 2 -e 3 -m ocn -i /lustre/atlas1/cli112/proj-shared/golaz/ACME_simulations/20160121.A_B2000ATMMOD.ne30_oEC.titan.a00/run -r ${DATA}/maps/map_oEC60to30_to_t62_bilin.20160301.nc -o ${DATA}/mpas/clm > ~/ncclimo.out 2>&1 &
# ncclimo -v iceAreaCell -c hist -s 2 -e 3 -m ice -i /lustre/atlas1/cli112/proj-shared/golaz/ACME_simulations/20160121.A_B2000ATMMOD.ne30_oEC.titan.a00/run -r ${DATA}/maps/map_oEC60to30_to_t62_bilin.20160301.nc -o ${DATA}/mpas/clm > ~/ncclimo.out 2>&1 &
# Best performance on resolutions finer than ne30 (~1x1 degree) requires a job scheduler/batch processor
-# Cobalt (cooley), SLURM (cori,edison), Torque (a PBS-variant) (hopper), and PBS (rhea) schedulers allow both interactive and non-interactive (i.e., script) batch jobs
+# Cobalt (cooley), SLURM (cori,edison), Maui (a PBS-variant) (blues), Torque (a PBS-variant) (hopper), and PBS (rhea) schedulers allow both interactive and non-interactive (i.e., script) batch jobs
+# ALCF Maui:
+# http://www.lcrc.anl.gov/for-users/using-lcrc/running-jobs
# ALCF Cobalt:
# softenv # lists available packages
# http://www.mcs.anl.gov/hs/software/systems/softenv/softenv-intro.html
@@ -198,6 +218,7 @@ esac # !HOSTNAME
# PBS: qsub -I, qsub, qstat, qdel
# SLURM: salloc, sbatch, squeue, scancel
# Interactive queue: a) Reserve nodes and acquire prompt on control node b) Execute ncclimo command interactively
+# Blues: qsub -I -A ACME -q acme -l nodes=12 -l walltime=00:30:00 -N ncclimo
# Cooley: qsub -I -A HiRes_EarthSys --nodecount=12 --time=00:30:00 --jobname=ncclimo
# Cori: salloc -A acme --nodes=12 --partition=debug --time=00:30:00 --job-name=ncclimo # NB: 30 minute limit, Edison too
# Hopper: qsub -I -A acme -V -l mppwidth=288 -l walltime=00:30:00 -q debug -N ncclimo # deprecated, old Edison
@@ -237,9 +258,9 @@ esac # !HOSTNAME
# yr_end: Year of last January to analyze
# Other options (often their default settings work well):
-# clm_md: Climatology mode, i.e., how to treat December. One of two options:
-# Seasonally-contiguous-december (SCD) mode (clm_md=scd) (default)
-# Seasonally-discontiguous-december (SDD) mode (clm_md=sdd)
+# dec_md: December mode, i.e., how to treat December. One of two options:
+# Seasonally-contiguous-december (SCD) mode (dec_md=scd) (default)
+# Seasonally-discontiguous-december (SDD) mode (dec_md=sdd)
# Both modes use an integral multiple of 12 months, and _never alter any input files_
# SCD climatologies begin in Dec of yr_srt-1, and end in Nov of yr_end
# SDD climatologies begin in Jan of yr_srt, and end in Dec of yr_end
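# Example (years illustrative): with yr_srt=1980 and yr_end=1983, SCD spans Dec 1979-Nov 1983 and SDD spans Jan 1980-Dec 1983; both span 48 months, an integral multiple of 12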
@@ -265,7 +286,6 @@ esac # !HOSTNAME
# 'FSNT,AODVIS'. Regular expressions work, too: 'AOD.?'
# Infrequently used options:
-# bnd_nm: Name of bounds dimension (examples include 'nbnd' (default), 'tbnd' (CAM2, CAM3), 'hist_interval' (CLM2)
# dbg_lvl: 0 = Quiet, print basic status during evaluation
# 1 = Print configuration, full commands, and status to output during evaluation
# 2 = As in dbg_lvl=1, but do not evaluate commands
@@ -274,9 +294,9 @@ esac # !HOSTNAME
# fml_nm_XX_YYYYMM_YYYYMM.nc (examples include '' (default), 'control', 'experiment')
# By default, fml_nm=$caseid. Use fml_nm instead of $caseid to simplify long names, avoid overlap, etc.
# hst_nm: History volume name referring to the $hst_nm character sequence used in history tape names:
-# caseid.mdl_nm.hst_nm.YYYY-MM.nc (examples include 'h0' (default, works for cam, clm), 'h1', 'h' (for cism))
+# caseid.mdl_nm.hst_nm.YYYY-MM.nc (examples include 'h0' (default, works for cam, clm), 'h1', 'h' (for cism), 'hist' (for mpascice, mpaso))
# mdl_nm: Model name referring to the character sequence $mdl_nm used in history tape names:
-# caseid.mdl_nm.h0.YYYY-MM.nc (examples include 'cam' (default), 'clm2', 'cam2', 'cism', 'pop')
+# caseid.mdl_nm.h0.YYYY-MM.nc (examples include 'cam' (default), 'clm2', 'cam2', 'cism', 'mpaso', 'mpascice', 'pop')
# nco_opt: String of options to pass-through to NCO, e.g.,
# '-D 2 -7 -L 1' for NCO debugging level 2, netCDF4-classic output, compression level 1
# '--no_tmp_fl -x' to skip temporary files, turn extraction into exclusion list
@@ -290,7 +310,7 @@ if [ -z "${nco_exe}" ]; then
echo "ERROR: Unable to find NCO, nco_exe = ${nco_exe}"
exit 1
fi # !nco_exe
-# Use stackoverflow method to find NCO directory
+# Use StackOverflow method to find NCO directory
while [ -h "${nco_exe}" ]; do
drc_nco="$( cd -P "$( dirname "${nco_exe}" )" && pwd )"
nco_exe="$(readlink "${nco_exe}")"
@@ -302,20 +322,29 @@ nco_vrs=$(ncks --version 2>&1 >/dev/null | grep NCO | awk '{print $5}')
# When running in a terminal window (not in an non-interactive batch queue)...
if [ -n "${TERM}" ]; then
# Set fonts for legibility
- fnt_nrm=`tput sgr0` # Normal
- fnt_bld=`tput bold` # Bold
- fnt_rvr=`tput smso` # Reverse
+ if [ -x /usr/bin/tput ] && tput setaf 1 &> /dev/null; then
+ fnt_bld=`tput bold` # Bold
+ fnt_nrm=`tput sgr0` # Normal
+ fnt_rvr=`tput smso` # Reverse
+ fnt_tlc=`tput sitm` # Italic
+ else
+ fnt_bld="\e[1m" # Bold
+ fnt_nrm="\e[0m" # Normal
+ fnt_rvr="\e[07m" # Reverse
+ fnt_tlc="\e[3m" # Italic
+ fi # !tput
fi # !TERM
# Defaults for command-line options and some derived variables
# Modify these defaults to save typing later
-bnd_nm='nbnd' # [sng] Bounds dimension name (e.g., 'nbnd', 'tbnd')
-clm_md='scd' # [sng] Climatology mode ('scd' or 'sdd' as per above)
+ann_sfx='01-01-00000' # [sng] Annual file suffix (e.g., '01-01-00000')
+bnr_flg='No' # [sng] Binary method
caseid='' # [sng] Case ID
caseid_xmp='famipc5_ne30_v0.3_00003' # [sng] Case ID for examples
cf_flg='Yes' # [sng] Produce CF climatology attribute?
-lnk_flg='Yes' # [sng] Link ACME-climo to AMWG-climo filenames
+clm_md='mth' # [sng] Climatology mode ('ann', 'dly', or 'mth')
dbg_lvl=0 # [nbr] Debugging level
+dec_md='scd' # [sng] December mode ('scd' or 'sdd' as per above)
drc_in='' # [sng] Input file directory
drc_in_xmp="${DATA}/ne30/raw" # [sng] Input file directory for examples
drc_in_mps="${DATA}/mpas/raw" # [sng] Input file directory for MPAS examples
@@ -329,13 +358,15 @@ drc_rgr_xmp="${DATA}/ne30/rgr" # [sng] Regrid file directory for examples
drc_rgr_xtn='' # [sng] Regridded file directory for for extended climatology
drc_xtn='' # [sng] Directory containing extended climatology
fml_nm='' # [sng] Family name (i.e., nickname, e.g., 'amip', 'control', 'experiment')
-gaa_sng_std="--gaa climo_script=${spt_nm} --gaa climo_hostname=${HOSTNAME} --gaa climo_version=${nco_vrs}" # [sng] Global attributes to add
-hdr_pad='1000' # [B] Pad at end of header section
+gaa_sng_std="--gaa climo_script=${spt_nm} --gaa climo_command=\"'${cmd_ln}'\" --gaa climo_hostname=${HOSTNAME} --gaa climo_version=${nco_vrs}" # [sng] Global attributes to add
+hdr_pad='10000' # [B] Pad at end of header section
hst_nm='h0' # [sng] History volume (e.g., 'h0', 'h1', 'h')
-mdl_nm='cam' # [sng] Model name (e.g., 'cam', 'cam2', 'cice', 'cism', 'clm', 'clm2', 'ocn')
+lnk_flg='Yes' # [sng] Link ACME-climo to AMWG-climo filenames
+mdl_nm='cam' # [sng] Model name (e.g., 'cam', 'cam2', 'cice', 'cism', 'clm', 'clm2', 'ice', 'mpascice', 'mpaso', 'ocn')
mdl_typ='cesm' # [sng] Model type ('cesm', 'mpas') (for filenames and regridding)
mpi_flg='No' # [sng] Parallelize over nodes
nco_opt='--no_tmp_fl' # [sng] NCO options (e.g., '-7 -D 1 -L 1')
+ncr_flg='No' # [sng] Incremental method
nd_nbr=1 # [nbr] Number of nodes
par_opt='' # [sng] Parallel options to shell
par_typ='bck' # [sng] Parallelism type
@@ -348,51 +379,54 @@ rgr_opt='' # [sng] Regridding options (e.g., '--rgr col_nm=lndgrid', '--rgr col_
thr_nbr=2 # [nbr] Thread number for regridder
#var_lst='FSNT,AODVIS' # [sng] Variables to process (empty means all)
var_lst='' # [sng] Variables to process (empty means all)
-xtn_flg='No' # [sng] Extend previous climatology with current data
+xtn_flg='No' # [sng] Produce extended climatology
yr_end='1983' # [yr] End year
yr_srt='1980' # [yr] Start year
function fnc_usg_prn { # NB: dash supports fnc_nm (){} syntax, not function fnc_nm{} syntax
# Print usage
- printf "\nQuick documentation for ${fnt_bld}${spt_nm}${fnt_nrm} (read script for more thorough explanations)\n\n"
- printf "${fnt_rvr}Basic usage:${fnt_nrm} ${fnt_bld}$spt_nm -c caseid -s yr_srt -e yr_end -i drc_in -o drc_out -r rgr_map${fnt_nrm}\n\n"
- echo "Command-line options:"
- echo "${fnt_rvr}-a${fnt_nrm} ${fnt_bld}clm_md${fnt_nrm} Annual climatology mode (default ${fnt_bld}${clm_md}${fnt_nrm})"
- echo "${fnt_rvr}-b${fnt_nrm} ${fnt_bld}bnd_nm${fnt_nrm} Bounds dimension name (default ${fnt_bld}${bnd_nm}${fnt_nrm})"
- echo "${fnt_rvr}-c${fnt_nrm} ${fnt_bld}caseid${fnt_nrm} Case ID string (default ${fnt_bld}${caseid}${fnt_nrm})"
- echo "${fnt_rvr}-d${fnt_nrm} ${fnt_bld}dbg_lvl${fnt_nrm} Debug level (default ${fnt_bld}${dbg_lvl}${fnt_nrm})"
- echo "${fnt_rvr}-e${fnt_nrm} ${fnt_bld}yr_end${fnt_nrm} End year (default ${fnt_bld}${yr_end}${fnt_nrm})"
- echo "${fnt_rvr}-f${fnt_nrm} ${fnt_bld}fml_nm${fnt_nrm} Family name (nickname) (empty means none) (default ${fnt_bld}${fml_nm}${fnt_nrm})"
- echo "${fnt_rvr}-h${fnt_nrm} ${fnt_bld}hst_nm${fnt_nrm} History volume name (default ${fnt_bld}${hst_nm}${fnt_nrm})"
- echo "${fnt_rvr}-i${fnt_nrm} ${fnt_bld}drc_in${fnt_nrm} Input directory (default ${fnt_bld}${drc_in}${fnt_nrm})"
- echo "${fnt_rvr}-l${fnt_nrm} ${fnt_bld}lnk_flg${fnt_nrm} Link ACME-climo to AMWG-climo filenames (default ${fnt_bld}${lnk_flg}${fnt_nrm})"
- echo "${fnt_rvr}-m${fnt_nrm} ${fnt_bld}mdl_nm${fnt_nrm} Model name (default ${fnt_bld}${mdl_nm}${fnt_nrm})"
- echo "${fnt_rvr}-n${fnt_nrm} ${fnt_bld}nco_opt${fnt_nrm} NCO options (empty means none) (default ${fnt_bld}${nco_opt}${fnt_nrm})"
- echo "${fnt_rvr}-O${fnt_nrm} ${fnt_bld}drc_rgr${fnt_nrm} Regridded directory (default ${fnt_bld}${drc_rgr}${fnt_nrm})"
- echo "${fnt_rvr}-o${fnt_nrm} ${fnt_bld}drc_out${fnt_nrm} Output directory (default ${fnt_bld}${drc_out}${fnt_nrm})"
- echo "${fnt_rvr}-p${fnt_nrm} ${fnt_bld}par_typ${fnt_nrm} Parallelism type (default ${fnt_bld}${par_typ}${fnt_nrm})"
- echo "${fnt_rvr}-r${fnt_nrm} ${fnt_bld}rgr_map${fnt_nrm} Regrid map (empty means none) (default ${fnt_bld}${rgr_map}${fnt_nrm})"
- echo "${fnt_rvr}-R${fnt_nrm} ${fnt_bld}rgr_opt${fnt_nrm} Regrid options (empty means none) (default ${fnt_bld}${rgr_opt}${fnt_nrm})"
- echo "${fnt_rvr}-t${fnt_nrm} ${fnt_bld}thr_nbr${fnt_nrm} Thread number for regridder (default ${fnt_bld}${thr_nbr}${fnt_nrm})"
- echo "${fnt_rvr}-s${fnt_nrm} ${fnt_bld}yr_srt${fnt_nrm} Start year (default ${fnt_bld}${yr_srt}${fnt_nrm})"
- echo "${fnt_rvr}-S${fnt_nrm} ${fnt_bld}yr_prv${fnt_nrm} Start year previous climo (empty means none) (default ${fnt_bld}${yr_srt_prv}${fnt_nrm})"
- echo "${fnt_rvr}-v${fnt_nrm} ${fnt_bld}var_lst${fnt_nrm} Variable list (empty means all) (default ${fnt_bld}${var_lst}${fnt_nrm})"
- echo "${fnt_rvr}-X${fnt_nrm} ${fnt_bld}drc_xtn${fnt_nrm} Extended climo directory (default ${fnt_bld}${drc_xtn}${fnt_nrm})"
- echo "${fnt_rvr}-x${fnt_nrm} ${fnt_bld}drc_prv${fnt_nrm} Previous climo directory (default ${fnt_bld}${drc_prv}${fnt_nrm})"
- echo "${fnt_rvr}-Y${fnt_nrm} ${fnt_bld}rgr_xtn${fnt_nrm} Regridded extended climo directory (default ${fnt_bld}${drc_rgr_xtn}${fnt_nrm})"
- echo "${fnt_rvr}-y${fnt_nrm} ${fnt_bld}rgr_prv${fnt_nrm} Regridded previous climo directory (default ${fnt_bld}${drc_rgr_prv}${fnt_nrm})"
+ printf "${fnt_rvr}Basic usage:\n${fnt_nrm} ${fnt_bld}${spt_nm} -c caseid -s yr_srt -e yr_end -i drc_in -o drc_out -r rgr_map${fnt_nrm}\n"
+ printf "${fnt_nrm} ${fnt_bld}${spt_nm} --case=caseid --start_year=yr_srt --end_year=yr_end --input=drc_in --output=drc_out --map=rgr_map${fnt_nrm}\n\n"
+ echo "Command-line options [long-option synonyms in ${fnt_tlc}italics${fnt_nrm}]:"
+ echo "${fnt_rvr}-a${fnt_nrm} ${fnt_bld}dec_md${fnt_nrm} December mode (default ${fnt_bld}${dec_md}${fnt_nrm}) [${fnt_tlc}dec_md, december_mode, dec_mode${fnt_nrm}]"
+ echo "${fnt_rvr}-C${fnt_nrm} ${fnt_bld}clm_md${fnt_nrm} Climatology mode (default ${fnt_bld}${clm_md}${fnt_nrm}) [${fnt_tlc}clm_md, climatology_mode, climo_mode${fnt_nrm}]"
+ echo "${fnt_rvr}-c${fnt_nrm} ${fnt_bld}caseid${fnt_nrm} Case ID string (default ${fnt_bld}${caseid}${fnt_nrm}) [${fnt_tlc}caseid, case_id, case${fnt_nrm}]"
+ echo "${fnt_rvr}-d${fnt_nrm} ${fnt_bld}dbg_lvl${fnt_nrm} Debug level (default ${fnt_bld}${dbg_lvl}${fnt_nrm}) [${fnt_tlc}dbg_lvl, dbg, debug, debug_level${fnt_nrm}]"
+ echo "${fnt_rvr}-E${fnt_nrm} ${fnt_bld}yr_end${fnt_nrm} End year previous climo (empty means none) (default ${fnt_bld}${yr_end_prv}${fnt_nrm}) [${fnt_tlc}yr_end_prv, prv_yr_end, previous_end${fnt_nrm}]"
+ echo "${fnt_rvr}-e${fnt_nrm} ${fnt_bld}yr_end${fnt_nrm} End year (default ${fnt_bld}${yr_end}${fnt_nrm}) [${fnt_tlc}yr_end, end_yr, year_end, end_year, end${fnt_nrm}]"
+ echo "${fnt_rvr}-f${fnt_nrm} ${fnt_bld}fml_nm${fnt_nrm} Family name (nickname) (empty means none) (default ${fnt_bld}${fml_nm}${fnt_nrm}) [${fnt_tlc}fml_nm, family_name${fnt_nrm}]"
+ echo "${fnt_rvr}-h${fnt_nrm} ${fnt_bld}hst_nm${fnt_nrm} History volume name (default ${fnt_bld}${hst_nm}${fnt_nrm}) [${fnt_tlc}hst_nm, history_name, history${fnt_nrm}]"
+ echo "${fnt_rvr}-i${fnt_nrm} ${fnt_bld}drc_in${fnt_nrm} Input directory (default ${fnt_bld}${drc_in}${fnt_nrm}) [${fnt_tlc}drc_in, in_drc, dir_in, in_dir, input${fnt_nrm}]"
+ echo "${fnt_rvr}-l${fnt_nrm} ${fnt_bld}lnk_flg${fnt_nrm} Link ACME-climo to AMWG-climo filenames (default ${fnt_bld}${lnk_flg}${fnt_nrm}) [${fnt_tlc}lnk_flg, link_flag, no_amwg_links${fnt_nrm}]"
+ echo "${fnt_rvr}-m${fnt_nrm} ${fnt_bld}mdl_nm${fnt_nrm} Model name (default ${fnt_bld}${mdl_nm}${fnt_nrm}) [${fnt_tlc}mdl_nm, model_name, model${fnt_nrm}]"
+ echo "${fnt_rvr}-n${fnt_nrm} ${fnt_bld}nco_opt${fnt_nrm} NCO options (empty means none) (default ${fnt_bld}${nco_opt}${fnt_nrm}) [${fnt_tlc}nco_opt, nco, nco_options${fnt_nrm}]"
+ echo "${fnt_rvr}-O${fnt_nrm} ${fnt_bld}drc_rgr${fnt_nrm} Regridded directory (default ${fnt_bld}${drc_rgr}${fnt_nrm}) [${fnt_tlc}drc_rgr, rgr_drc, dir_regrid, regrid${fnt_nrm}]"
+ echo "${fnt_rvr}-o${fnt_nrm} ${fnt_bld}drc_out${fnt_nrm} Output directory (default ${fnt_bld}${drc_out}${fnt_nrm}) [${fnt_tlc}drc_out, out_drc, dir_out, out_dir, output${fnt_nrm}]"
+ echo "${fnt_rvr}-p${fnt_nrm} ${fnt_bld}par_typ${fnt_nrm} Parallelism type (default ${fnt_bld}${par_typ}${fnt_nrm}) [${fnt_tlc}par_typ, par_md, parallel_type, parallel_mode, parallel${fnt_nrm}]"
+ echo "${fnt_rvr}-r${fnt_nrm} ${fnt_bld}rgr_map${fnt_nrm} Regrid map (empty means none) (default ${fnt_bld}${rgr_map}${fnt_nrm}) [${fnt_tlc}rgr_map, regrid_map$, map, map_file, map_fl{fnt_nrm}]"
+ echo "${fnt_rvr}-R${fnt_nrm} ${fnt_bld}rgr_opt${fnt_nrm} Regrid options (empty means none) (default ${fnt_bld}${rgr_opt}${fnt_nrm}) [${fnt_tlc}rgr_opt, regrid_options${fnt_nrm}]"
+ echo "${fnt_rvr}-t${fnt_nrm} ${fnt_bld}thr_nbr${fnt_nrm} Thread number for regridder (default ${fnt_bld}${thr_nbr}${fnt_nrm}) [${fnt_tlc}thr_nbr, thread_number, thread, threads${fnt_nrm}]"
+ echo "${fnt_rvr}-S${fnt_nrm} ${fnt_bld}yr_srt${fnt_nrm} Start year previous climo (empty means none) (default ${fnt_bld}${yr_srt_prv}${fnt_nrm}) [${fnt_tlc}yr_srt_prv, prv_yr_srt, previous_start${fnt_nrm}]"
+ echo "${fnt_rvr}-s${fnt_nrm} ${fnt_bld}yr_srt${fnt_nrm} Start year (default ${fnt_bld}${yr_srt}${fnt_nrm}) [${fnt_tlc}yr_srt, start_yr, year_start, start_year, start${fnt_nrm}]"
+ echo "${fnt_rvr}-v${fnt_nrm} ${fnt_bld}var_lst${fnt_nrm} Variable list (empty means all) (default ${fnt_bld}${var_lst}${fnt_nrm}) [${fnt_tlc}var_lst, variable_list, variable, variables${fnt_nrm}]"
+ echo "${fnt_rvr}-X${fnt_nrm} ${fnt_bld}drc_xtn${fnt_nrm} Extended climo directory (default ${fnt_bld}${drc_xtn}${fnt_nrm}) [${fnt_tlc}drc_xtn, xtn_drc, extended_dir, extended_climo, extended${fnt_nrm}]"
+ echo "${fnt_rvr}-x${fnt_nrm} ${fnt_bld}drc_prv${fnt_nrm} Previous climo directory (default ${fnt_bld}${drc_prv}${fnt_nrm}) [${fnt_tlc}drc_prv, prv_drc, previous_dir, previous_climo, previous${fnt_nrm}]"
+ echo "${fnt_rvr}-Y${fnt_nrm} ${fnt_bld}rgr_xtn${fnt_nrm} Regridded extended climo directory (default ${fnt_bld}${drc_rgr_xtn}${fnt_nrm}) [${fnt_tlc}drc_rgr_xtn, drc_xtn_rgr, regridded_extended, extended_regridded${fnt_nrm}]"
+ echo "${fnt_rvr}-y${fnt_nrm} ${fnt_bld}rgr_prv${fnt_nrm} Regridded previous climo directory (default ${fnt_bld}${drc_rgr_prv}${fnt_nrm}) [${fnt_tlc}drc_rgr_prv, drc_prv_rgr, regridded_previous, prefvious_regridded${fnt_nrm}]"
printf "\n"
- printf "Examples: ${fnt_bld}$spt_nm -c ${caseid_xmp} -s ${yr_srt} -e ${yr_end} -i ${drc_in_xmp} -o ${drc_out_xmp} ${fnt_nrm}\n"
- printf " ${fnt_bld}$spt_nm -c ${caseid_xmp} -s ${yr_srt} -e ${yr_end} -i ${drc_in_xmp} -o ${drc_out_xmp} -r ~zender/data/maps/map_ne30np4_to_fv129x256_aave.20150901.nc ${fnt_nrm}\n"
- printf " ${fnt_bld}$spt_nm -c control -m clm2 -s ${yr_srt} -e ${yr_end} -i ${drc_in_xmp} -o ${drc_out_xmp} -r ~zender/data/maps/map_ne30np4_to_fv129x256_aave.20150901.nc ${fnt_nrm}\n"
- printf " ${fnt_bld}$spt_nm -c hist -m ice -s ${yr_srt} -e ${yr_end} -i ${drc_in_mps} -o ${drc_out_mps} -r ~zender/data/maps/map_oEC60to30_to_t62_bilin.20160301.nc ${fnt_nrm}\n"
- printf " ${fnt_bld}$spt_nm -c hist -m ocn -p mpi -s 1 -e 5 -i ${drc_in_mps} -o ${drc_out_mps} -r ~zender/data/maps/map_oEC60to30_to_t62_bilin.20160301.nc ${fnt_nrm}\n\n"
- printf "Interactive batch queues on ...\n"
+ printf "${fnt_rvr}Examples:${fnt_nrm}\n${fnt_bld}${spt_nm} -c ${caseid_xmp} -s ${yr_srt} -e ${yr_end} -i ${drc_in_xmp} -o ${drc_out_xmp} ${fnt_nrm}\n"
+ printf "${fnt_bld}${spt_nm} -c ${caseid_xmp} -s ${yr_srt} -e ${yr_end} -i ${drc_in_xmp} -o ${drc_out_xmp} -r ~zender/data/maps/map_ne30np4_to_fv129x256_aave.20150901.nc ${fnt_nrm}\n"
+ printf "${fnt_bld}${spt_nm} -c control -m clm2 -s ${yr_srt} -e ${yr_end} -i ${drc_in_xmp} -o ${drc_out_xmp} -r ~zender/data/maps/map_ne30np4_to_fv129x256_aave.20150901.nc ${fnt_nrm}\n"
+ printf "${fnt_bld}${spt_nm} -m mpascice -s ${yr_srt} -e ${yr_end} -i ${drc_in_mps} -o ${drc_out_mps} -r ~zender/data/maps/map_oEC60to30_to_t62_bilin.20160301.nc ${fnt_nrm}\n"
+ printf "${fnt_bld}${spt_nm} -m mpaso -p mpi -s 1 -e 5 -i ${drc_in_mps} -o ${drc_out_mps} -r ~zender/data/maps/map_oEC60to30_to_t62_bilin.20160301.nc ${fnt_nrm}\n\n"
+ printf "${fnt_rvr}Interactive batch queues:${fnt_nrm}\n"
+ printf "blues : qsub -I -A ACME -q acme -l nodes=1 -l walltime=00:30:00 -N ncclimo\n"
printf "cooley: qsub -I -A HiRes_EarthSys --nodecount=1 --time=00:30:00 --jobname=ncclimo\n"
printf "cori : salloc -A acme --nodes=1 --time=00:30:00 --partition=debug --job-name=ncclimo\n"
printf "edison: salloc -A acme --nodes=1 --time=00:30:00 --partition=debug --job-name=ncclimo\n"
printf "rhea : qsub -I -A CLI115 -V -l nodes=1 -l walltime=00:30:00 -N ncclimo\n"
- printf "rhea : qsub -I -A CLI115 -V -l nodes=1 -l walltime=00:30:00 -lpartition=gpu -N ncclimo # Bigmem\n\n"
+ printf "rhea : qsub -I -A CLI115 -V -l nodes=1 -l walltime=00:30:00 -lpartition=gpu -N ncclimo # Bigmem\n"
+ printf "\nComplete documentation at http://nco.sf.net/nco.html#${spt_nm}\n\n"
# echo "3-yrs ne30: ncclimo -c famipc5_ne30_v0.3_00003 -s 1980 -e 1982 -i /lustre/atlas1/cli115/world-shared/mbranst/famipc5_ne30_v0.3_00003-wget-test -o ${DATA}/ne30/clm -r ~zender/data/maps/map_ne30np4_to_fv129x256_aave.20150901.nc > ~/ncclimo.out 2>&1 &"
# printf "3-yrs ne120: ncclimo -p mpi -c famipc5_ne120_v0.3_00003 -s 1980 -e 1982 -i /lustre/atlas1/cli115/world-shared/mbranst/famipc5_ne120_v0.3_00003-wget-test -o ${DATA}/ne120/clm -r ~zender/data/maps/map_ne120np4_to_fv257x512_aave.20150901.nc > ~/ncclimo.out 2>&1 &\n\n"
exit 1
@@ -440,42 +474,89 @@ if [ ${arg_nbr} -eq 0 ]; then
fi # !arg_nbr
# Parse command-line options:
-# http://stackoverflow.com/questions/402377/using-getopts-in-bash-shell-script-to-get-long-and-short-command-line-options
+# http://stackoverflow.com/questions/402377/using-getopts-in-bash-shell-script-to-get-long-and-short-command-line-options (see method by Adam Katz)
# http://tuxtweaks.com/2014/05/bash-getopts
-cmd_ln="${spt_nm} ${@}"
-while getopts :a:b:c:d:e:f:h:i:l:m:n:O:o:p:R:r:S:s:t:v:X:x:Y:y: OPT; do
+while getopts :a:C:c:d:E:e:f:h:i:l:m:n:O:o:p:R:r:S:s:t:v:X:x:Y:y:-: OPT; do
case ${OPT} in
- a) clm_md=${OPTARG} ;; # Climatology mode
- b) bnd_nm=${OPTARG} ;; # Bounds dimension name
- c) caseid=${OPTARG} ;; # CASEID
- d) dbg_lvl=${OPTARG} ;; # Debugging level
- e) yr_end=${OPTARG} ;; # End year
- f) fml_nm=${OPTARG} ;; # Family name
- h) hst_nm=${OPTARG} ;; # History tape name
- i) drc_in=${OPTARG} ;; # Input directory
- l) lnk_flg=${OPTARG} ;; # Link ACME to AMWG name
- m) mdl_nm=${OPTARG} ;; # Model name
- n) nco_opt=${OPTARG} ;; # NCO options
- o) drc_out_usr=${OPTARG} ;; # Output directory
- O) drc_rgr_usr=${OPTARG} ;; # Regridded directory
- p) par_typ=${OPTARG} ;; # Parallelism type
- R) rgr_opt=${OPTARG} ;; # Regridding options
- r) rgr_map=${OPTARG} ;; # Regridding map
- S) yr_srt_prv=${OPTARG} ;; # Start year previous
- s) yr_srt=${OPTARG} ;; # Start year
- t) thr_usr=${OPTARG} ;; # Thread number
- v) var_lst=${OPTARG} ;; # Variables
- X) drc_xtn=${OPTARG} ;; # Extended climo directory
- x) drc_prv=${OPTARG} ;; # Previous climo directory
- Y) drc_rgr_xtn=${OPTARG} ;; # Regridded extended climo directory
- y) drc_rgr_prv=${OPTARG} ;; # Regridded previous climo directory
+ a) dec_md="${OPTARG}" ;; # December mode
+ C) clm_md="${OPTARG}" ;; # Climatology mode
+ c) caseid="${OPTARG}" ;; # CASEID
+ d) dbg_lvl="${OPTARG}" ;; # Debugging level
+ E) yr_end_prv="${OPTARG}" ;; # End year previous
+ e) yr_end="${OPTARG}" ;; # End year
+ f) fml_nm="${OPTARG}" ;; # Family name
+ h) hst_nm="${OPTARG}" ;; # History tape name
+ i) drc_in="${OPTARG}" ;; # Input directory
+ l) lnk_flg="${OPTARG}" ;; # Link ACME to AMWG name
+ m) mdl_nm="${OPTARG}" ;; # Model name
+ n) nco_opt="${OPTARG}" ;; # NCO options
+ o) drc_out_usr="${OPTARG}" ;; # Output directory
+ O) drc_rgr_usr="${OPTARG}" ;; # Regridded directory
+ p) par_typ="${OPTARG}" ;; # Parallelism type
+ R) rgr_opt="${OPTARG}" ;; # Regridding options
+ r) rgr_map="${OPTARG}" ;; # Regridding map
+ S) yr_srt_prv="${OPTARG}" ;; # Start year previous
+ s) yr_srt="${OPTARG}" ;; # Start year
+ t) thr_usr="${OPTARG}" ;; # Thread number
+ v) var_lst="${OPTARG}" ;; # Variables
+ X) drc_xtn="${OPTARG}" ;; # Extended climo directory
+ x) drc_prv="${OPTARG}" ;; # Previous climo directory
+ Y) drc_rgr_xtn="${OPTARG}" ;; # Regridded extended climo directory
+ y) drc_rgr_prv="${OPTARG}" ;; # Regridded previous climo directory
+ -) LONG_OPTARG="${OPTARG#*=}"
+ case ${OPTARG} in
+ # Hereafter ${OPTARG} is long argument key, and ${LONG_OPTARG}, if any, is long argument value
+ # Long options with no argument, no short option counterpart
+ # Long options with argument, no short option counterpart
+ # Long options with short counterparts, ordered by short option key
+ dec_md=?* | december_mode=?* | dec_mode=?* ) dec_md="${LONG_OPTARG}" ;; # -a # December mode
+ clm_md=?* | climatology_mode=?* | climo_mode=?* ) clm_md="${LONG_OPTARG}" ;; # -C # Climatology mode
+ caseid=?* | case_id=?* | case=?* ) caseid="${LONG_OPTARG}" ;; # -c # CASEID
+ dbg_lvl=?* | dbg=?* | debug=?* | debug_level=?* ) dbg_lvl="${LONG_OPTARG}" ;; # -d # Debugging level
+ yr_end_prv=?* | prv_yr_end=?* | previous_end=?* ) yr_end_prv="${LONG_OPTARG}" ;; # -E # End year previous
+ yr_end=?* | end_yr=?* | year_end=?* | end_year=?* | end=?* ) yr_end="${LONG_OPTARG}" ;; # -e # End year
+ fml_nm=?* | family_name=?* | family=?* ) fml_nm="${LONG_OPTARG}" ;; # -f # Family name
+ hst_nm=?* | history_name=?* | history=?* ) hst_nm="${LONG_OPTARG}" ;; # -h # History tape name
+ drc_in=?* | in_drc=?* | dir_in=?* | in_dir=?* | input=?* ) drc_in="${LONG_OPTARG}" ;; # -i # Input directory
+ lnk_flg | link_flag | no_amwg_link | no_amwg_links | no_AMWG_link | no_AMWG_links ) lnk_flg='No' ;; # -l # Link ACME to AMWG name
+ lnk_flg=?* | link_flag=?* | no_amwg_link=?* | no_amwg_links=?* | no_AMWG_link=?* | no_AMWG_links=?* ) echo "No argument allowed for --${OPTARG} switch" >&2; exit 1 ;; # -l # Link ACME to AMWG name
+ mdl_nm=?* | model_name=?* | model=?* ) mdl_nm="${LONG_OPTARG}" ;; # -m # Model name
+ nco_opt=?* | nco=?* | nco_options=?* ) nco_opt="${LONG_OPTARG}" ;; # -n # NCO options
+ drc_out=?* | out_drc=?* | dir_out=?* | out_dir=?* | output=?* ) drc_out_usr="${LONG_OPTARG}" ;; # -o # Output directory
+ drc_rgr=?* | rgr_drc=?* | dir_regrid=?* | regrid_dir=?* | regrid=?* ) drc_rgr_usr="${LONG_OPTARG}" ;; # -O # Regridded directory
+ par_typ=?* | par_md=?* | parallel_type=?* | parallel_mode=?* | parallel=?* ) par_typ="${LONG_OPTARG}" ;; # -p # Parallelism type
+ rgr_opt=?* | regrid_options=?* ) rgr_opt="${LONG_OPTARG}" ;; # -R # Regridding options
+ rgr_map=?* | regrid_map=?* | map=?* ) rgr_map="${LONG_OPTARG}" ;; # -r # Regridding map
+ yr_srt_prv=?* | prv_yr_srt=?* | previous_start=?* ) yr_srt_prv="${LONG_OPTARG}" ;; # -S # Start year previous
+ yr_srt=?* | start_yr=?* | year_start=?* | start_year=?* | start=?* ) yr_srt="${LONG_OPTARG}" ;; # -s # Start year
+ thr_nbr=?* | thread_number=?* | thread=?* | threads=?* ) thr_usr="${LONG_OPTARG}" ;; # -t # Thread number
+ var_lst=?* | variable_list=?* | variable=?* | variables=?* ) var_lst="${LONG_OPTARG}" ;; # -v # Variables
+ drc_xtn=?* | xtn_drc=?* | extended_dir=?* | extended_climo=?* | extended=?* ) drc_xtn="${LONG_OPTARG}" ;; # -X # Extended climo directory
+ drc_prv=?* | prv_drc=?* | previous_dir=?* | previous_climo=?* | previous=?* ) drc_prv="${LONG_OPTARG}" ;; # -x # Previous climo directory
+ drc_rgr_xtn=?* | drc_xtn_rgr=?* | regridded_extended=?* | extended_regridded=?* ) drc_rgr_xtn="${LONG_OPTARG}" ;; # -Y # Regridded extended climo directory
+ drc_rgr_prv=?* | drc_prv_rgr=?* | regridded_previous=?* | previous_regridded=?* ) drc_rgr_prv="${LONG_OPTARG}" ;; # -y # Regridded previous climo directory
+ '' ) break ;; # "--" terminates argument processing
+ * ) printf "\nERROR: Illegal option ${fnt_bld}--${OPTARG}${fnt_nrm}" >&2; fnc_usg_prn ;;
+ esac ;;
\?) # Unrecognized option
- printf "\nERROR: Option ${fnt_bld}-$OPTARG${fnt_nrm} not allowed"
+ printf "\nERROR: Option ${fnt_bld}-${OPTARG}${fnt_nrm} not allowed" >&2
fnc_usg_prn ;;
esac
done
shift $((OPTIND-1)) # Advance one argument
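+# A minimal sketch (commented out, not part of the workflow) of the long-option-via-getopts
+# pattern used above: the trailing "-:" in the optstring makes, e.g., --dbg_lvl=2 arrive as
+# OPT='-' with OPTARG='dbg_lvl=2', and ${OPTARG#*=} strips the leading "key=" to yield the value:
+#   while getopts :d:-: OPT; do
+#     case ${OPT} in
+#       d) dbg_lvl="${OPTARG}" ;;                                # -d 2
+#       -) LONG_OPTARG="${OPTARG#*=}"
+#          case ${OPTARG} in
+#            dbg_lvl=?* | debug=?* ) dbg_lvl="${LONG_OPTARG}" ;; # --dbg_lvl=2
+#            * ) echo "Illegal option --${OPTARG}" >&2; exit 1 ;;
+#          esac ;;
+#     esac
+#   done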
+# Determine mode first (this helps determine other defaults)
+if [ -n "${yr_srt_prv}" ]; then
+ # Specifying only yr_srt_prv implies incremental method
+ # Specifying both yr_srt_prv and yr_end_prv implies binary method
+ xtn_flg='Yes'
+ if [ -n "${yr_end_prv}" ]; then
+ bnr_flg='Yes'
+ else # !yr_end_prv binary method
+ ncr_flg='Yes'
+ fi # !yr_end_prv binary method
+fi # !yr_srt_prv extended climo
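+# Illustrative invocations of the three modes (directory names hypothetical):
+#   ncclimo -c caseid -s 6 -e 10 -i raw_drc -o clm_drc                               # standard climo of years 6-10
+#   ncclimo -c caseid -s 6 -e 10 -S 1 -i raw_drc -o clm_drc -X xtn_drc               # incremental: extend 1-5 climo with raw years 6-10
+#   ncclimo -c caseid -s 6 -e 10 -S 1 -E 5 -i clm_crr_drc -x clm_prv_drc -X xtn_drc  # binary: weight two pre-computed climos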
+
# Derived variables
if [ -n "${drc_out_usr}" ]; then
# Fancy %/ syntax removes trailing slash (e.g., from $TMPDIR)
@@ -489,7 +570,12 @@ fi # !drc_rgr_usr
if [ -n "${drc_prv}" ]; then
drc_prv="${drc_prv%/}"
else
- drc_prv="${drc_out}"
+ if [ "${bnr_flg}" = 'Yes' ]; then
+ drc_prv="${drc_in}"
+ fi # !bnr_flg
+ if [ "${ncr_flg}" = 'Yes' ]; then
+ drc_prv="${drc_out}"
+ fi # !ncr_flg
fi # !drc_prv
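+# The "%/" used above and below is bash suffix-stripping: it removes at most one trailing
+# slash (e.g., as left by $TMPDIR) before the directory is re-used in path concatenations:
+#   drc='/tmp/climo/' ; echo "${drc%/}"   # -> /tmp/climo   (drc is a placeholder name)
+#   drc='/tmp/climo'  ; echo "${drc%/}"   # -> /tmp/climo   (unchanged)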
if [ -n "${drc_xtn}" ]; then
drc_xtn="${drc_xtn%/}"
@@ -522,18 +608,16 @@ let yr_nbr=${yr_end_rth}-${yr_srt_rth}+1
# Derived variables
out_nm=${caseid}
-if [ "${caseid}" = 'hist' ]; then
+if [ "${caseid}" = 'hist' ] || [ "${mdl_nm}" = 'mpaso' ] || [ "${mdl_nm}" = 'mpascice' ]; then
mdl_typ='mpas'
fi # !caseid
if [ "${mdl_typ}" = 'mpas' ]; then
- out_nm="mpas_${mdl_nm}"
+ out_nm="${mdl_nm}"
+ hst_nm='hist'
fi # !mdl_typ
if [ -n "${fml_nm}" ]; then
out_nm="${fml_nm}"
fi # !fml_nm
-if [ "${mdl_nm}" = 'cam2' ]; then
- bnd_nm='tbnd'
-fi # !caseid
# http://stackoverflow.com/questions/965053/extract-filename-and-extension-in-bash
# http://stackoverflow.com/questions/17420994/bash-regex-match-string
if [[ "${caseid}" =~ ^(.*)([0-9][0-9][0-9][0-9][01][0-9].nc.?)$ ]]; then
@@ -545,6 +629,16 @@ if [[ "${caseid}" =~ ^(.*)([0-9][0-9][0-9][0-9][01][0-9].nc.?)$ ]]; then
out_nm=${bs_nm}
bs_sfx="${caseid#*.}"
fi # !caseid
+if [ "${clm_md}" != 'ann' ] && [ "${clm_md}" != 'dly' ] && [ "${clm_md}" != 'mth' ]; then
+ echo "ERROR: User-defined climatology mode is ${clm_md}. Valid options are 'ann', 'dly', or 'mth' (default)"
+ exit 1
+fi # !clm_md
+if [ "${clm_md}" = 'ann' ]; then
+ clm_nbr=1
+ dec_md='sdd'
+elif [ "${clm_md}" = 'mth' ]; then
+ clm_nbr=17
+fi # !clm_md
if [ -n "${gaa_sng_std}" ]; then
if [ "${yr_nbr}" -gt 1 ] ; then
@@ -592,7 +686,7 @@ mm_ann_end='12' # [idx] Last month used in annual climatology
mm_djf_srt='01' # [idx] First month used in DJF climatology
mm_djf_end='12' # [idx] Last month used in DJF climatology
yr_cln=${yr_nbr} # [nbr] Calendar years in climatology
-if [ ${clm_md} = 'scd' ]; then
+if [ ${dec_md} = 'scd' ]; then
yyyy_clm_srt_dec=`printf "%04d" ${yr_srtm1}`
yyyy_clm_end_dec=`printf "%04d" ${yr_endm1}`
mm_ann_srt='12'
@@ -664,10 +758,11 @@ fi # !mpi
# Print initial state
if [ ${dbg_lvl} -ge 1 ]; then
- printf "dbg: bnd_nm = ${bnd_nm}\n"
+ printf "dbg: bnr_flg = ${bnr_flg}\n"
printf "dbg: caseid = ${caseid}\n"
printf "dbg: cf_flg = ${cf_flg}\n"
printf "dbg: clm_md = ${clm_md}\n"
+ printf "dbg: dec_md = ${dec_md}\n"
printf "dbg: dbg_lvl = ${dbg_lvl}\n"
printf "dbg: drc_in = ${drc_in}\n"
printf "dbg: drc_nco = ${drc_nco}\n"
@@ -683,8 +778,10 @@ if [ ${dbg_lvl} -ge 1 ]; then
printf "dbg: hst_nm = ${hst_nm}\n"
printf "dbg: lnk_flg = ${lnk_flg}\n"
printf "dbg: mdl_nm = ${mdl_nm}\n"
+ printf "dbg: mdl_typ = ${mdl_typ}\n"
printf "dbg: mpi_flg = ${mpi_flg}\n"
printf "dbg: nco_opt = ${nco_opt}\n"
+ printf "dbg: ncr_flg = ${ncr_flg}\n"
printf "dbg: nd_nbr = ${nd_nbr}\n"
printf "dbg: par_typ = ${par_typ}\n"
printf "dbg: rgr_map = ${rgr_map}\n"
@@ -708,8 +805,12 @@ if [ ${dbg_lvl} -ge 2 ]; then
fi # !dbg
# Create output directory
-mkdir -p ${drc_out}
-mkdir -p ${drc_rgr}
+if [ -n "${drc_out}" ]; then
+ mkdir -p ${drc_out}
+fi # !drc_out
+if [ -n "${drc_rgr}" ]; then
+ mkdir -p ${drc_rgr}
+fi # !drc_rgr
# Human-readable summary
date_srt=$(date +"%s")
@@ -719,21 +820,40 @@ if [ ${dbg_lvl} -ge 0 ]; then
fi # !dbg
printf "Started climatology generation for dataset ${caseid} at `date`.\n"
printf "Running climatology script ${spt_nm} from directory ${drc_spt}\n"
-printf "NCO version ${nco_vrs} from directory ${drc_nco}\n"
-printf "Input files in directory ${drc_in}\n"
+printf "NCO binaries version ${nco_vrs} from directory ${drc_nco}\n"
+if [ "${xtn_flg}" = 'No' ]; then
+ printf "Producing standard climatology from raw input files in directory ${drc_in}\n"
+ printf "Output files to directory ${drc_out}\n"
+fi # !xtn_flg
+if [ "${bnr_flg}" = 'Yes' ]; then
+ printf "Producing extended climatology in binary mode: Will combine pre-computed climatology in directory ${drc_prv} with pre-computed climatology in directory ${drc_in}\n"
+ printf "Output files to directory ${drc_xtn}\n"
+fi # !bnr_flg
+if [ "${ncr_flg}" = 'Yes' ]; then
+ printf "Producing extended climatology in incremental mode: Pre-computed climatology in directory ${drc_prv} will be incremented by raw input files in directory ${drc_in}\n"
+ printf "Output files to directory ${drc_xtn}\n"
+fi # !ncr_flg
#printf "Intermediate/temporary files written to directory ${drc_tmp}\n"
-printf "Output files to directory ${drc_out}\n"
+if [ "${bnr_flg}" = 'No' ]; then
+ printf "Climatology from ${yr_nbr} years of contiguous raw data touching ${yr_cln} calendar years from YYYYMM = ${yyyy_clm_srt_dec}${mm_ann_srt} to ${yyyy_end}${mm_ann_end}.\n"
+fi # !bnr_flg
if [ "${mdl_typ}" = 'yyyymm' ]; then
printf "Filenames will be constructed with generic conventions as ${bs_nm}_YYYYMM.${bs_sfx}\n"
else # !mdl_typ
printf "Filenames will be constructed with CESM'ish or ACME'ish conventions.\n"
fi # !mdl_typ
-printf "Climatology from ${yr_nbr} years of contiguous data crossing ${yr_cln} calendar years from YYYYMM = ${yyyy_clm_srt_dec}${mm_ann_srt} to ${yyyy_end}${mm_ann_end}.\n"
-if [ ${clm_md} = 'scd' ]; then
- printf "Winter statistics based on seasonally contiguous December (scd-mode): DJF sequences are consecutive months that cross calendar-year boundaries.\n"
-else
- printf "Winter statistics based on seasonally discontiguous December (sdd-mode): DJF sequences comprise three months from the same calendar year.\n"
-fi # !scd
+if [ ${clm_md} = 'ann' ]; then
+ printf "Each input file assumed to contain mean of one year.\n"
+elif [ ${clm_md} = 'mth' ]; then
+ printf "Each input file assumed to contain mean of one month.\n"
+fi # !mth
+if [ ${clm_md} = 'mth' ]; then
+ if [ ${dec_md} = 'scd' ]; then
+ printf "Winter statistics based on seasonally contiguous December (scd-mode): DJF sequences are consecutive months that cross calendar-year boundaries.\n"
+ else
+ printf "Winter statistics based on seasonally discontiguous December (sdd-mode): DJF sequences comprise three months from the same calendar year.\n"
+ fi # !scd
+fi # !mth
if [ ${cf_flg} = 'Yes' ]; then
printf "Annotation for CF climatology attribute and climatology_bounds variable will be performed.\n"
else
@@ -745,511 +865,606 @@ else
printf "This climatology will not be regridded.\n"
fi # !rgr
-# Block 1: Climatological monthly means
-# Block 1 Loop 1: Generate, check, and store (but do not yet execute) monthly commands
-printf "Generating climatology...\n"
-clm_idx=0
-for mth in {01..12}; do
- let clm_idx=${clm_idx}+1
- MM=`printf "%02d" ${clm_idx}`
- yr_fl=''
- for yr in `seq ${yyyy_srt} ${yyyy_end}`; do
- YYYY=`printf "%04d" ${yr}`
- if [ ${mdl_typ} = 'cesm' ]; then
- yr_fl="${yr_fl} ${caseid}.${mdl_nm}.${hst_nm}.${YYYY}-${MM}.nc"
- elif [ ${mdl_typ} = 'mpas' ]; then # Use MPAS not CESM conventions
- yr_fl="${yr_fl} ${caseid}.${mdl_nm}.${YYYY}-${MM}-01_00.00.00.nc"
- elif [ ${mdl_typ} = 'yyyymm' ]; then # Generate from caseid + YYYYMM
- yr_fl="${yr_fl} ${bs_nm}_${YYYY}${MM}.${bs_sfx}"
- fi # !cesm
- done # !yr
- if [ ${clm_md} = 'scd' ] && [ ${MM} = '12' ]; then
+# Block 1: Generate, check, and store (but do not yet execute) commands
+
+# Block 1 Loop 1: Climatologies based on monthly means
+if [ "${clm_md}" = 'mth' ]; then
+ clm_idx=0
+ for mth in {01..12}; do
+ let clm_idx=${clm_idx}+1
+ MM=`printf "%02d" ${clm_idx}`
yr_fl=''
- for yr in `seq ${yr_srtm1} ${yr_endm1}`; do
+ for yr in `seq ${yyyy_srt} ${yyyy_end}`; do
YYYY=`printf "%04d" ${yr}`
if [ ${mdl_typ} = 'cesm' ]; then
yr_fl="${yr_fl} ${caseid}.${mdl_nm}.${hst_nm}.${YYYY}-${MM}.nc"
elif [ ${mdl_typ} = 'mpas' ]; then # Use MPAS not CESM conventions
- yr_fl="${yr_fl} ${caseid}.${mdl_nm}.${YYYY}-${MM}-01_00.00.00.nc"
+ # 20161130: Old MPAS rule until today
+ # yr_fl="${yr_fl} ${caseid}.${mdl_nm}.${YYYY}-${MM}-01_00.00.00.nc"
+ # Example file: /scratch2/scratchdirs/golaz/ACME_simulations/20161117.beta0.A_WCYCL1850S.ne30_oEC_ICG.edison/run/mpascice.hist.am.timeSeriesStatsMonthly.0001-02-01.nc
+ yr_fl="${yr_fl} ${mdl_nm}.hist.am.timeSeriesStatsMonthly.${YYYY}-${MM}-01.nc"
elif [ ${mdl_typ} = 'yyyymm' ]; then # Generate from caseid + YYYYMM
yr_fl="${yr_fl} ${bs_nm}_${YYYY}${MM}.${bs_sfx}"
fi # !cesm
done # !yr
- yyyy_clm_srt=${yyyy_clm_srt_dec}
- yyyy_clm_end=${yyyy_clm_end_dec}
- fi # !scd
- for fl_in in ${yr_fl} ; do
- if [ ! -f "${drc_in}/${fl_in}" ]; then
- echo "ERROR: Unable to find required input file ${drc_in}/${fl_in}"
- echo "HINT: All files implied to exist by the climatology bounds (start/end year/month) must be in ${drc_in} before ${spt_nm} will proceed"
- exit 1
- fi # ! -f
- done # !fl_in
- fl_out[${clm_idx}]="${drc_out}/${out_nm}_${MM}_${yyyy_clm_srt}${MM}_${yyyy_clm_end}${MM}_climo.nc"
- cmd_clm[${clm_idx}]="${cmd_mpi[${clm_idx}]} ncra --cb -O ${nco_opt} -p ${drc_in} ${yr_fl} ${fl_out[${clm_idx}]}"
-done # !mth
+ if [ ${dec_md} = 'scd' ] && [ ${MM} = '12' ]; then
+ yr_fl=''
+ for yr in `seq ${yr_srtm1} ${yr_endm1}`; do
+ YYYY=`printf "%04d" ${yr}`
+ if [ ${mdl_typ} = 'cesm' ]; then
+ yr_fl="${yr_fl} ${caseid}.${mdl_nm}.${hst_nm}.${YYYY}-${MM}.nc"
+ elif [ ${mdl_typ} = 'mpas' ]; then # Use MPAS not CESM conventions
+ yr_fl="${yr_fl} ${mdl_nm}.hist.am.timeSeriesStatsMonthly.${YYYY}-${MM}-01.nc"
+ elif [ ${mdl_typ} = 'yyyymm' ]; then # Generate from caseid + YYYYMM
+ yr_fl="${yr_fl} ${bs_nm}_${YYYY}${MM}.${bs_sfx}"
+ fi # !cesm
+ done # !yr
+ yyyy_clm_srt=${yyyy_clm_srt_dec}
+ yyyy_clm_end=${yyyy_clm_end_dec}
+ fi # !scd
+ # Check for existence of raw input only when file will be used
+ if [ "${bnr_flg}" = 'No' ]; then
+ for fl_in in ${yr_fl} ; do
+ if [ ! -f "${drc_in}/${fl_in}" ]; then
+ echo "ERROR: Unable to find required input file ${drc_in}/${fl_in}"
+ echo "HINT: All files implied to exist by the climatology bounds (start/end year/month) must be in ${drc_in} before ${spt_nm} will proceed"
+ exit 1
+ fi # ! -f
+ done # !fl_in
+ else # !bnr_flg
+ # In binary mode drc_out is actually used to locate input files from climatology B (same as output files in incremental mode)
+ drc_out="${drc_in}"
+ fi # !bnr_flg
+ fl_out[${clm_idx}]="${drc_out}/${out_nm}_${MM}_${yyyy_clm_srt}${MM}_${yyyy_clm_end}${MM}_climo.nc"
+ cmd_clm[${clm_idx}]="${cmd_mpi[${clm_idx}]} ncra --cb -O ${nco_opt} -p ${drc_in} ${yr_fl} ${fl_out[${clm_idx}]}"
+ done # !mth
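+  # Schematic example of one generated monthly command, using the famipc5_ne30_v0.3_00003
+  # case from the usage examples and (hypothetically) mdl_nm=cam, hst_nm=h0, yr_srt=1980,
+  # yr_end=1982; the January (MM=01) command then reduces to roughly:
+  #   ncra --cb -O -p ${drc_in} \
+  #     famipc5_ne30_v0.3_00003.cam.h0.1980-01.nc \
+  #     famipc5_ne30_v0.3_00003.cam.h0.1981-01.nc \
+  #     famipc5_ne30_v0.3_00003.cam.h0.1982-01.nc \
+  #     ${drc_out}/famipc5_ne30_v0.3_00003_01_198001_198201_climo.nc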
-# Monthly output filenames constructed above; specify remaining (seasonal, annual) output names
-fl_out[13]="${drc_out}/${out_nm}_MAM_${yyyy_srt}03_${yyyy_end}05_climo.nc"
-fl_out[14]="${drc_out}/${out_nm}_JJA_${yyyy_srt}06_${yyyy_end}08_climo.nc"
-fl_out[15]="${drc_out}/${out_nm}_SON_${yyyy_srt}09_${yyyy_end}11_climo.nc"
-fl_out[16]="${drc_out}/${out_nm}_DJF_${yyyy_clm_srt_dec}${mm_djf_srt}_${yyyy_end}${mm_djf_end}_climo.nc"
-fl_out[17]="${drc_out}/${out_nm}_ANN_${yyyy_clm_srt_dec}${mm_ann_srt}_${yyyy_end}${mm_ann_end}_climo.nc"
-# Derive all seventeen regridded and AMWG names from output names
-for ((clm_idx=1;clm_idx<=17;clm_idx++)); do
- fl_amwg[${clm_idx}]=`expr match "${fl_out[${clm_idx}]}" '\(.*\)_.*_.*_climo.nc'` # Prune _YYYYYMM_YYYYMM_climo.nc
- fl_amwg[${clm_idx}]="${fl_amwg[${clm_idx}]}_climo.nc" # Replace with _climo.nc
- fl_amwg[${clm_idx}]="${fl_amwg[${clm_idx}]/${drc_out}\//}" # Delete prepended path to ease symlinking
- if [ -n "${rgr_map}" ]; then
- fl_rgr[${clm_idx}]="${fl_out[${clm_idx}]/${drc_out}/${drc_rgr}}"
- if [ "${drc_out}" = "${drc_rgr}" ]; then
- # Append geometry suffix to regridded files in same directory as native climo
- # http://tldp.org/LDP/abs/html/string-manipulation.html
- dfl_sfx='rgr'
- rgr_sfx=`expr match "${rgr_map}" '.*_to_\(.*\).nc'`
- if [ "${#rgr_sfx}" -eq 0 ]; then
- printf "${spt_nm}: WARNING Unable to extract geometric suffix from mapfile, will suffix regridded files with \"${dfl_sfx}\" instead\n"
- rgr_sfx=${dfl_sfx}
- else
- yyyymmdd_sng=`expr match "${rgr_sfx}" '.*\(\.[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]\)'` # Find YYYYYMMDD
- if [ "${#yyyymmdd_sng}" -ne 0 ]; then
- rgr_sfx=${rgr_sfx%%${yyyymmdd_sng}} # Delete YYYYYMMDD
+ # Monthly output filenames constructed above; specify remaining (seasonal, annual) output names
+ fl_out[13]="${drc_out}/${out_nm}_MAM_${yyyy_srt}03_${yyyy_end}05_climo.nc"
+ fl_out[14]="${drc_out}/${out_nm}_JJA_${yyyy_srt}06_${yyyy_end}08_climo.nc"
+ fl_out[15]="${drc_out}/${out_nm}_SON_${yyyy_srt}09_${yyyy_end}11_climo.nc"
+ fl_out[16]="${drc_out}/${out_nm}_DJF_${yyyy_clm_srt_dec}${mm_djf_srt}_${yyyy_end}${mm_djf_end}_climo.nc"
+ fl_out[17]="${drc_out}/${out_nm}_ANN_${yyyy_clm_srt_dec}${mm_ann_srt}_${yyyy_end}${mm_ann_end}_climo.nc"
+ # Derive all seventeen regridded and AMWG names from output names
+ for ((clm_idx=1;clm_idx<=17;clm_idx++)); do
+ fl_amwg[${clm_idx}]=`expr match "${fl_out[${clm_idx}]}" '\(.*\)_.*_.*_climo.nc'` # Prune _YYYYMM_YYYYMM_climo.nc
+ fl_amwg[${clm_idx}]="${fl_amwg[${clm_idx}]}_climo.nc" # Replace with _climo.nc
+ fl_amwg[${clm_idx}]="${fl_amwg[${clm_idx}]/${drc_out}\//}" # Delete prepended path to ease symlinking
+ if [ -n "${rgr_map}" ]; then
+ fl_rgr[${clm_idx}]="${fl_out[${clm_idx}]/${drc_out}/${drc_rgr}}"
+ if [ "${drc_out}" = "${drc_rgr}" ]; then
+ # Append geometry suffix to regridded files in same directory as native climo
+ # http://tldp.org/LDP/abs/html/string-manipulation.html
+ dfl_sfx='rgr'
+ rgr_sfx=`expr match "${rgr_map}" '.*_to_\(.*\).nc'`
+ if [ "${#rgr_sfx}" -eq 0 ]; then
+ printf "${spt_nm}: WARNING Unable to extract geometric suffix from mapfile, will suffix regridded files with \"${dfl_sfx}\" instead\n"
+ rgr_sfx=${dfl_sfx}
+ else
+ yyyymmdd_sng=`expr match "${rgr_sfx}" '.*\(\.[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]\)'` # Find YYYYMMDD
+ if [ "${#yyyymmdd_sng}" -ne 0 ]; then
+ rgr_sfx=${rgr_sfx%%${yyyymmdd_sng}} # Delete YYYYMMDD
+ fi # !strlen
fi # !strlen
- fi # !strlen
- # rgr_sfx=`expr match "${rgr_sfx}" '\(.*\)\.[0-9][0-9][0-9][0-9][0-9][0-9]'` #
- fl_rgr[${clm_idx}]="${fl_rgr[${clm_idx}]/.nc/_${rgr_sfx}.nc}"
- fi # !drc_rgr
- fi # !rgr_map
-done # !clm_idx
-
-# Block 1 Loop 2: Execute and/or echo monthly climatology commands
-for ((clm_idx=1;clm_idx<=12;clm_idx++)); do
- printf "Climatological monthly mean for month ${clm_idx} ...\n"
+ # rgr_sfx=`expr match "${rgr_sfx}" '\(.*\)\.[0-9][0-9][0-9][0-9][0-9][0-9]'` #
+ fl_rgr[${clm_idx}]="${fl_rgr[${clm_idx}]/.nc/_${rgr_sfx}.nc}"
+ fi # !drc_rgr
+ fi # !rgr_map
+ done # !clm_idx
+fi # !clm_md
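+# For the example mapfile map_ne30np4_to_fv129x256_aave.20150901.nc used earlier, the suffix
+# logic above (taken only when regridded and native climos share a directory) works out to:
+#   expr match "map_ne30np4_to_fv129x256_aave.20150901.nc" '.*_to_\(.*\).nc'  # -> fv129x256_aave.20150901
+#   strip trailing .YYYYMMDD                                                  # -> fv129x256_aave
+# so such regridded climos would be suffixed _fv129x256_aave.nc alongside the native files.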
+
+# Block 1 Loop 2: Climatologies based on annual means
+if [ "${clm_md}" = 'ann' ]; then
+ clm_idx=1
+ yr_fl=''
+ for yr in `seq ${yyyy_srt} ${yyyy_end}`; do
+ YYYY=`printf "%04d" ${yr}`
+ yr_fl="${yr_fl} ${caseid}.${mdl_nm}.${hst_nm}.${YYYY}-${ann_sfx}.nc"
+ done # !yr
+ # Check for existence of raw input only when file will be used (NB: next ~12 lines duplicate monthly code)
+ if [ "${bnr_flg}" = 'No' ]; then
+ for fl_in in ${yr_fl} ; do
+ if [ ! -e "${drc_in}/${fl_in}" ]; then
+ echo "ERROR: Unable to find required input file ${drc_in}/${fl_in}"
+ echo "HINT: All files implied to exist by the climatology bounds (start/end year) must be in ${drc_in} before ${spt_nm} will proceed"
+ exit 1
+ fi # ! -e
+ done # !fl_in
+ else # !bnr_flg
+ # In binary mode drc_out is actually used to locate input files from climatology B (same as output files in incremental mode)
+ drc_out="${drc_in}"
+ fi # !bnr_flg
+ fl_out[${clm_idx}]="${drc_out}/${out_nm}_ANN_${yyyy_srt}01_${yyyy_end}12_climo.nc"
+ cmd_clm[${clm_idx}]="${cmd_mpi[${clm_idx}]} ncra -O ${nco_opt} -p ${drc_in} ${yr_fl} ${fl_out[${clm_idx}]} ${par_opt}"
+
+ # Block 1 Loop 2: Climatological annual mean
+ printf "Climatological annual mean...\n"
if [ ${dbg_lvl} -ge 1 ]; then
echo ${cmd_clm[${clm_idx}]}
fi # !dbg
if [ ${dbg_lvl} -le 1 ]; then
- if [ -z "${par_opt}" ]; then
- eval ${cmd_clm[${clm_idx}]}
- if [ $? -ne 0 ]; then
- printf "${spt_nm}: ERROR monthly climo cmd_clm[${clm_idx}] failed. Debug this:\n${cmd_clm[${clm_idx}]}\n"
- exit 1
- fi # !err
- else # !par_opt
- eval ${cmd_clm[${clm_idx}]} ${par_opt} # eval always returns 0 on backgrounded processes
- clm_pid[${clm_idx}]=$!
- # Potential alternatives to eval:
-# eval "${cmd_clm[${clm_idx}]}" # borken
-# ${cmd_clm[${clm_idx}]} # borken
-# "${cmd_clm[${clm_idx}]}" # borken
-# exec "${cmd_clm[${clm_idx}]}" # borken
-# $(${cmd_clm[${clm_idx}]}) # borken
-# $("${cmd_clm[${clm_idx}]}") # works (when & inside cmd quotes)
- fi # !par_opt
- fi # !dbg
-done # !clm_idx
-if [ -n "${par_opt}" ]; then
- for ((clm_idx=1;clm_idx<=12;clm_idx++)); do
- wait ${clm_pid[${clm_idx}]}
+ eval ${cmd_clm[${clm_idx}]}
if [ $? -ne 0 ]; then
- printf "${spt_nm}: ERROR monthly climo cmd_clm[${clm_idx}] failed. Debug this:\n${cmd_clm[${clm_idx}]}\n"
+ printf "${spt_nm}: ERROR annual climo cmd_clm[${clm_idx}] failed\n"
exit 1
fi # !err
- done # !clm_idx
-fi # !par_opt
-wait
+ fi # !dbg
+ wait
+
+ # Block 2: Regrid climatological annual mean
+ if [ -n "${rgr_map}" ]; then
+ printf "Regrid annual data...\n"
+ cmd_rgr[${clm_idx}]="${cmd_mpi[${clm_idx}]} ncks -t ${thr_nbr} -O ${nco_opt} ${rgr_opt} ${fl_out[${clm_idx}]} ${fl_out[${clm_idx}]/.nc/.rgr.nc}"
+ if [ ${dbg_lvl} -ge 1 ]; then
+ echo ${cmd_rgr[${clm_idx}]} ${par_opt}
+ fi # !dbg
+ if [ ${dbg_lvl} -le 1 ]; then
+ eval ${cmd_rgr[${clm_idx}]} ${par_opt}
+ if [ $? -ne 0 ]; then
+ printf "${spt_nm}: ERROR annual regrid cmd_rgr[${clm_idx}] failed\n"
+ exit 1
+ fi # !err
+ fi # !dbg
+ wait
+ printf "Done with regridding\n"
+ fi # !rgr_map
+
+fi # !clm_md
-# Block 1: Loop 4: Regrid first twelve files. Load-balance by using idle nodes (nodes not used for seasonal climatologies).
-if [ -n "${rgr_map}" ]; then
- printf "Regrid monthly data...\n"
+# Many subsequent blocks are executed only for normal and incremental monthly climos, not for binary or non-monthly climos
+if [ "${clm_md}" = 'mth' ] && [ "${bnr_flg}" = 'No' ]; then
+
+ # Block 1 Loop 2: Execute and/or echo monthly climatology commands
+ printf "Generating climatology...\n"
for ((clm_idx=1;clm_idx<=12;clm_idx++)); do
- # NB: Months, seasons, files are 1-based ([1..12], [13..16], [1..17]), nodes are 0-based ([0..11])
- let nd_idx=$(((clm_idx-1+4) % nd_nbr))
- if [ ${nd_idx} -lt 4 ]; then
- let nd_idx=${nd_idx}+4
- fi # !nd
- cmd_rgr[${clm_idx}]="${cmd_mpi[${nd_idx}]} ncks -t ${thr_nbr} -O ${nco_opt} ${rgr_opt} ${fl_out[${clm_idx}]} ${fl_rgr[${clm_idx}]}"
- if [ "${mdl_typ}" = 'mpas' ]; then
- cmd_rgr[${clm_idx}]="${cmd_mpi[${nd_idx}]} ncremap -C -u .pid${spt_pid}.climo.${clm_idx}.tmp -P mpas -t ${thr_nbr} -m ${rgr_map} -i ${fl_out[${clm_idx}]} -o ${fl_rgr[${clm_idx}]}"
- fi # !mdl_typ
+ printf "Climatological monthly mean for month ${clm_idx} ...\n"
if [ ${dbg_lvl} -ge 1 ]; then
- echo ${cmd_rgr[${clm_idx}]}
+ echo ${cmd_clm[${clm_idx}]}
fi # !dbg
if [ ${dbg_lvl} -le 1 ]; then
if [ -z "${par_opt}" ]; then
- eval ${cmd_rgr[${clm_idx}]}
+ eval ${cmd_clm[${clm_idx}]}
if [ $? -ne 0 ]; then
- printf "${spt_nm}: ERROR monthly regrid cmd_rgr[${clm_idx}] failed. Debug this:\n${cmd_rgr[${clm_idx}]}\n"
+ printf "${spt_nm}: ERROR monthly climo cmd_clm[${clm_idx}] failed. Debug this:\n${cmd_clm[${clm_idx}]}\n"
exit 1
fi # !err
else # !par_opt
- eval ${cmd_rgr[${clm_idx}]} ${par_opt}
- rgr_pid[${clm_idx}]=$!
+ eval ${cmd_clm[${clm_idx}]} ${par_opt} # eval always returns 0 on backgrounded processes
+ clm_pid[${clm_idx}]=$!
+ # Potential alternatives to eval:
+ # eval "${cmd_clm[${clm_idx}]}" # borken
+ # ${cmd_clm[${clm_idx}]} # borken
+ # "${cmd_clm[${clm_idx}]}" # borken
+ # exec "${cmd_clm[${clm_idx}]}" # borken
+ # $(${cmd_clm[${clm_idx}]}) # borken
+ # $("${cmd_clm[${clm_idx}]}") # works (when & inside cmd quotes)
fi # !par_opt
fi # !dbg
- done
- # Start seasonal means first, then wait() for monthly regridding to finish
-fi # !rgr_map
-
-# Block 2: Climatological seasonal means
-# Block 2 Loop 1: Generate seasonal commands
-printf "Climatological seasonal means...\n"
-cmd_clm[13]="${cmd_mpi[13]} ncra --cb -O -w 31,30,31 ${nco_opt} ${fl_out[3]} ${fl_out[4]} ${fl_out[5]} ${fl_out[13]}"
-cmd_clm[14]="${cmd_mpi[14]} ncra --cb -O -w 30,31,31 ${nco_opt} ${fl_out[6]} ${fl_out[7]} ${fl_out[8]} ${fl_out[14]}"
-cmd_clm[15]="${cmd_mpi[15]} ncra --cb -O -w 30,31,30 ${nco_opt} ${fl_out[9]} ${fl_out[10]} ${fl_out[11]} ${fl_out[15]}"
-cmd_clm[16]="${cmd_mpi[16]} ncra --cb -O -w 31,31,28 ${nco_opt} ${fl_out[12]} ${fl_out[1]} ${fl_out[2]} ${fl_out[16]}"
-
-# Block 2 Loop 2: Execute and/or echo seasonal climatology commands
-for ((clm_idx=13;clm_idx<=16;clm_idx++)); do
- if [ ${dbg_lvl} -ge 1 ]; then
- echo ${cmd_clm[${clm_idx}]}
- fi # !dbg
- if [ ${dbg_lvl} -le 1 ]; then
- if [ -z "${par_opt}" ]; then
- eval ${cmd_clm[${clm_idx}]}
- if [ $? -ne 0 ]; then
- printf "${spt_nm}: ERROR seasonal climo cmd_clm[${clm_idx}] failed. Debug this:\n${cmd_clm[${clm_idx}]}\n"
- exit 1
- fi # !err
- else # !par_opt
- eval ${cmd_clm[${clm_idx}]} ${par_opt}
- clm_pid[${clm_idx}]=$!
- fi # !par_opt
- fi # !dbg
-done # !clm_idx
-# wait() for monthly regridding, if any, to finish
-if [ -n "${rgr_map}" ]; then
+ done # !clm_idx
if [ -n "${par_opt}" ]; then
for ((clm_idx=1;clm_idx<=12;clm_idx++)); do
- wait ${rgr_pid[${clm_idx}]}
+ wait ${clm_pid[${clm_idx}]}
if [ $? -ne 0 ]; then
- printf "${spt_nm}: ERROR monthly regrid cmd_rgr[${clm_idx}] failed. Debug this:\n${cmd_rgr[${clm_idx}]}\n"
+ printf "${spt_nm}: ERROR monthly climo cmd_clm[${clm_idx}] failed. Debug this:\n${cmd_clm[${clm_idx}]}\n"
exit 1
fi # !err
done # !clm_idx
fi # !par_opt
-fi # !rgr_map
-# wait() for seasonal climatologies to finish
-if [ -n "${par_opt}" ]; then
- for ((clm_idx=13;clm_idx<=16;clm_idx++)); do
- wait ${clm_pid[${clm_idx}]}
- if [ $? -ne 0 ]; then
- printf "${spt_nm}: ERROR seasonal climo cmd_clm[${clm_idx}] failed. Debug this:\n${cmd_clm[${clm_idx}]}\n"
- exit 1
- fi # !err
- done # !clm_idx
-fi # !par_opt
-wait
+ wait
+
+ # Block 1: Loop 4: Regrid first twelve files. Load-balance by using idle nodes (nodes not used for seasonal climatologies).
+ if [ -n "${rgr_map}" ]; then
+ printf "Regrid monthly data...\n"
+ for ((clm_idx=1;clm_idx<=12;clm_idx++)); do
+ # NB: Months, seasons, files are 1-based ([1..12], [13..16], [1..17]), nodes are 0-based ([0..11])
+ let nd_idx=$(((clm_idx-1+4) % nd_nbr))
+ if [ ${nd_idx} -lt 4 ]; then
+ let nd_idx=${nd_idx}+4
+ fi # !nd
+ cmd_rgr[${clm_idx}]="${cmd_mpi[${nd_idx}]} ncks -t ${thr_nbr} -O ${nco_opt} ${rgr_opt} ${fl_out[${clm_idx}]} ${fl_rgr[${clm_idx}]}"
+ if [ "${mdl_typ}" = 'mpas' ]; then
+ cmd_rgr[${clm_idx}]="${cmd_mpi[${nd_idx}]} ncremap -C -u .pid${spt_pid}.climo.${clm_idx}.tmp -P mpas -t ${thr_nbr} -m ${rgr_map} -i ${fl_out[${clm_idx}]} -o ${fl_rgr[${clm_idx}]}"
+ fi # !mdl_typ
+ if [ ${dbg_lvl} -ge 1 ]; then
+ echo ${cmd_rgr[${clm_idx}]}
+ fi # !dbg
+ if [ ${dbg_lvl} -le 1 ]; then
+ if [ -z "${par_opt}" ]; then
+ eval ${cmd_rgr[${clm_idx}]}
+ if [ $? -ne 0 ]; then
+ printf "${spt_nm}: ERROR monthly regrid cmd_rgr[${clm_idx}] failed. Debug this:\n${cmd_rgr[${clm_idx}]}\n"
+ exit 1
+ fi # !err
+ else # !par_opt
+ eval ${cmd_rgr[${clm_idx}]} ${par_opt}
+ rgr_pid[${clm_idx}]=$!
+ fi # !par_opt
+ fi # !dbg
+ done
+ # Start seasonal means first, then wait() for monthly regridding to finish
+ fi # !rgr_map
+
+ # Block 2: Climatological seasonal means
+ # Block 2 Loop 1: Generate seasonal commands
+ printf "Climatological seasonal means...\n"
+ cmd_clm[13]="${cmd_mpi[13]} ncra --cb -O -w 31,30,31 ${nco_opt} ${fl_out[3]} ${fl_out[4]} ${fl_out[5]} ${fl_out[13]}"
+ cmd_clm[14]="${cmd_mpi[14]} ncra --cb -O -w 30,31,31 ${nco_opt} ${fl_out[6]} ${fl_out[7]} ${fl_out[8]} ${fl_out[14]}"
+ cmd_clm[15]="${cmd_mpi[15]} ncra --cb -O -w 30,31,30 ${nco_opt} ${fl_out[9]} ${fl_out[10]} ${fl_out[11]} ${fl_out[15]}"
+ cmd_clm[16]="${cmd_mpi[16]} ncra --cb -O -w 31,31,28 ${nco_opt} ${fl_out[12]} ${fl_out[1]} ${fl_out[2]} ${fl_out[16]}"
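+  # The -w weights above are days-per-month, so each season is the day-weighted mean of its
+  # three monthly climos: MAM 31+30+31=92, JJA 30+31+31=92, SON 30+31+30=91, and
+  # DJF 31+31+28=90 days (February taken as 28); the same season lengths (92,92,91,90)
+  # reappear as the -w weights of the ANN mean in Block 3 below.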
-# Block 2: Loop 4: Regrid seasonal files. Load-balance by using idle nodes (nodes not used for annual mean).
-if [ -n "${rgr_map}" ]; then
- printf "Regrid seasonal data...\n"
+ # Block 2 Loop 2: Execute and/or echo seasonal climatology commands
for ((clm_idx=13;clm_idx<=16;clm_idx++)); do
- let nd_idx=$(((clm_idx-1+4) % nd_nbr))
- if [ ${nd_idx} -lt 4 ]; then
- let nd_idx=${nd_idx}+4
- fi # !nd
- cmd_rgr[${clm_idx}]="${cmd_mpi[${nd_idx}]} ncks -t ${thr_nbr} -O ${nco_opt} ${rgr_opt} ${fl_out[${clm_idx}]} ${fl_rgr[${clm_idx}]}"
- if [ "${mdl_typ}" = 'mpas' ]; then
- cmd_rgr[${clm_idx}]="${cmd_mpi[${nd_idx}]} ncremap -C -u .pid${spt_pid}.climo.${clm_idx}.tmp -P mpas -t ${thr_nbr} -m ${rgr_map} -i ${fl_out[${clm_idx}]} -o ${fl_rgr[${clm_idx}]}"
- fi # !mdl_typ
if [ ${dbg_lvl} -ge 1 ]; then
- echo ${cmd_rgr[${clm_idx}]}
+ echo ${cmd_clm[${clm_idx}]}
fi # !dbg
if [ ${dbg_lvl} -le 1 ]; then
if [ -z "${par_opt}" ]; then
- eval ${cmd_rgr[${clm_idx}]}
+ eval ${cmd_clm[${clm_idx}]}
if [ $? -ne 0 ]; then
- printf "${spt_nm}: ERROR seasonal regrid cmd_rgr[${clm_idx}] failed. Debug this:\n${cmd_rgr[${clm_idx}]}\n"
+ printf "${spt_nm}: ERROR seasonal climo cmd_clm[${clm_idx}] failed. Debug this:\n${cmd_clm[${clm_idx}]}\n"
exit 1
fi # !err
else # !par_opt
- eval ${cmd_rgr[${clm_idx}]} ${par_opt}
- rgr_pid[${clm_idx}]=$!
+ eval ${cmd_clm[${clm_idx}]} ${par_opt}
+ clm_pid[${clm_idx}]=$!
fi # !par_opt
fi # !dbg
- done
- # Start annual mean first, then wait() for seasonal regridding to finish
-fi # !rgr_map
-
-# Block 3: Climatological annual mean (seventeenth file)
-printf "Climatological annual mean...\n"
-cmd_clm[17]="${cmd_mpi[17]} ncra --c2b -O -w 92,92,91,90 ${nco_opt} ${fl_out[13]} ${fl_out[14]} ${fl_out[15]} ${fl_out[16]} ${fl_out[17]}"
-if [ ${dbg_lvl} -ge 1 ]; then
- echo ${cmd_clm[17]}
-fi # !dbg
-if [ ${dbg_lvl} -le 1 ]; then
- if [ -z "${par_opt}" ]; then
- eval ${cmd_clm[17]}
- if [ $? -ne 0 ]; then
- printf "${spt_nm}: ERROR annual climo cmd_clm[17] failed. Debug this:\n${cmd_clm[17]}\n"
- exit 1
- fi # !err
- else # !par_opt
- eval ${cmd_clm[17]} ${par_opt}
- clm_pid[17]=$!
- fi # !par_opt
-fi # !dbg
-# wait() for seasonal regridding, if any, to finish
-if [ -n "${rgr_map}" ]; then
+ done # !clm_idx
+ # wait() for monthly regridding, if any, to finish
+ if [ -n "${rgr_map}" ]; then
+ if [ -n "${par_opt}" ]; then
+ for ((clm_idx=1;clm_idx<=12;clm_idx++)); do
+ wait ${rgr_pid[${clm_idx}]}
+ if [ $? -ne 0 ]; then
+ printf "${spt_nm}: ERROR monthly regrid cmd_rgr[${clm_idx}] failed. Debug this:\n${cmd_rgr[${clm_idx}]}\n"
+ exit 1
+ fi # !err
+ done # !clm_idx
+ fi # !par_opt
+ fi # !rgr_map
+ # wait() for seasonal climatologies to finish
if [ -n "${par_opt}" ]; then
for ((clm_idx=13;clm_idx<=16;clm_idx++)); do
- wait ${rgr_pid[${clm_idx}]}
+ wait ${clm_pid[${clm_idx}]}
if [ $? -ne 0 ]; then
- printf "${spt_nm}: ERROR seasonal regrid cmd_rgr[${clm_idx}] failed. Debug this:\n${cmd_rgr[${clm_idx}]}\n"
+ printf "${spt_nm}: ERROR seasonal climo cmd_clm[${clm_idx}] failed. Debug this:\n${cmd_clm[${clm_idx}]}\n"
exit 1
fi # !err
done # !clm_idx
fi # !par_opt
-fi # !rgr_map
-# wait() for annual climatology to finish
-if [ -n "${par_opt}" ]; then
- wait ${clm_pid[17]}
- if [ $? -ne 0 ]; then
- printf "${spt_nm}: ERROR annual climo cmd_clm[17] failed. Debug this:\n${cmd_clm[17]}\n"
- exit 1
- fi # !err
-fi # !par_opt
-
-# Block 5: Regrid climatological annual mean
-if [ -n "${rgr_map}" ]; then
- printf "Regrid annual data...\n"
- for ((clm_idx=17;clm_idx<=17;clm_idx++)); do
- cmd_rgr[${clm_idx}]="${cmd_mpi[${clm_idx}]} ncks -t ${thr_nbr} -O ${nco_opt} ${rgr_opt} ${fl_out[${clm_idx}]} ${fl_rgr[${clm_idx}]}"
- if [ "${mdl_typ}" = 'mpas' ]; then
- cmd_rgr[${clm_idx}]="${cmd_mpi[${clm_idx}]} ncremap -C -u .pid${spt_pid}.climo.${clm_idx}.tmp -P mpas -t ${thr_nbr} -m ${rgr_map} -i ${fl_out[${clm_idx}]} -o ${fl_rgr[${clm_idx}]}"
- fi # !mdl_typ
- if [ ${dbg_lvl} -ge 1 ]; then
- echo ${cmd_rgr[${clm_idx}]}
- fi # !dbg
- if [ ${dbg_lvl} -le 1 ]; then
- # NB: Do not background climatological mean regridding
- eval ${cmd_rgr[${clm_idx}]}
- if [ $? -ne 0 ]; then
- printf "${spt_nm}: ERROR annual regrid cmd_rgr[${clm_idx}] failed. Debug this:\n${cmd_rgr[${clm_idx}]}\n"
- exit 1
- fi # !err
- fi # !dbg
- done
-fi # !rgr_map
-
-# Link ACME-climo to AMWG-climo filenames
-# drc_pwd is always fully qualified path but drc_out and drc_rgr may be relative paths
-# Strategy: Start in drc_pwd, cd to drc_rgr, then link so return code comes from ln not cd
-if [ ${lnk_flg} = 'Yes' ]; then
- printf "Link ACME-climo to AMWG-climo filenames...\n"
- for ((clm_idx=1;clm_idx<=17;clm_idx++)); do
- if [ -n "${rgr_map}" ]; then
- cmd_lnk[${clm_idx}]="cd ${drc_pwd};cd ${drc_rgr};ln -s -f ${fl_rgr[${clm_idx}]/${drc_rgr}\//} ${fl_amwg[${clm_idx}]/${drc_rgr}\//}"
- else
- cmd_lnk[${clm_idx}]="cd ${drc_pwd};cd ${drc_out};ln -s -f ${fl_out[${clm_idx}]/${drc_out}\//} ${fl_amwg[${clm_idx}]/${drc_out}\//}"
- fi # !rgr_map
- if [ ${dbg_lvl} -ge 1 ]; then
- echo ${cmd_lnk[${clm_idx}]}
- fi # !dbg
- if [ ${dbg_lvl} -le 1 ]; then
- eval ${cmd_lnk[${clm_idx}]}
+ wait
+
+ # Block 2: Loop 4: Regrid seasonal files. Load-balance by using idle nodes (nodes not used for annual mean).
+ if [ -n "${rgr_map}" ]; then
+ printf "Regrid seasonal data...\n"
+ for ((clm_idx=13;clm_idx<=16;clm_idx++)); do
+ let nd_idx=$(((clm_idx-1+4) % nd_nbr))
+ if [ ${nd_idx} -lt 4 ]; then
+ let nd_idx=${nd_idx}+4
+ fi # !nd
+ cmd_rgr[${clm_idx}]="${cmd_mpi[${nd_idx}]} ncks -t ${thr_nbr} -O ${nco_opt} ${rgr_opt} ${fl_out[${clm_idx}]} ${fl_rgr[${clm_idx}]}"
+ if [ "${mdl_typ}" = 'mpas' ]; then
+ cmd_rgr[${clm_idx}]="${cmd_mpi[${nd_idx}]} ncremap -C -u .pid${spt_pid}.climo.${clm_idx}.tmp -P mpas -t ${thr_nbr} -m ${rgr_map} -i ${fl_out[${clm_idx}]} -o ${fl_rgr[${clm_idx}]}"
+ fi # !mdl_typ
+ if [ ${dbg_lvl} -ge 1 ]; then
+ echo ${cmd_rgr[${clm_idx}]}
+ fi # !dbg
+ if [ ${dbg_lvl} -le 1 ]; then
+ if [ -z "${par_opt}" ]; then
+ eval ${cmd_rgr[${clm_idx}]}
+ if [ $? -ne 0 ]; then
+ printf "${spt_nm}: ERROR seasonal regrid cmd_rgr[${clm_idx}] failed. Debug this:\n${cmd_rgr[${clm_idx}]}\n"
+ exit 1
+ fi # !err
+ else # !par_opt
+ eval ${cmd_rgr[${clm_idx}]} ${par_opt}
+ rgr_pid[${clm_idx}]=$!
+ fi # !par_opt
+ fi # !dbg
+ done
+ # Start annual mean first, then wait() for seasonal regridding to finish
+ fi # !rgr_map
+
+ # Block 3: Climatological annual mean (seventeenth file)
+ printf "Climatological annual mean...\n"
+ cmd_clm[17]="${cmd_mpi[17]} ncra --c2b -O -w 92,92,91,90 ${nco_opt} ${fl_out[13]} ${fl_out[14]} ${fl_out[15]} ${fl_out[16]} ${fl_out[17]}"
+ if [ ${dbg_lvl} -ge 1 ]; then
+ echo ${cmd_clm[17]}
+ fi # !dbg
+ if [ ${dbg_lvl} -le 1 ]; then
+ if [ -z "${par_opt}" ]; then
+ eval ${cmd_clm[17]}
if [ $? -ne 0 ]; then
- printf "${spt_nm}: ERROR linking ACME to AMWG filename cmd_lnk[${clm_idx}] failed. Debug this:\n${cmd_lnk[${clm_idx}]}\n"
+ printf "${spt_nm}: ERROR annual climo cmd_clm[17] failed. Debug this:\n${cmd_clm[17]}\n"
exit 1
fi # !err
- fi # !dbg
- done # !clm_idx
- cd ${drc_pwd}
-fi # !lnk_flg
+ else # !par_opt
+ eval ${cmd_clm[17]} ${par_opt}
+ clm_pid[17]=$!
+ fi # !par_opt
+ fi # !dbg
+ # wait() for seasonal regridding, if any, to finish
+ if [ -n "${rgr_map}" ]; then
+ if [ -n "${par_opt}" ]; then
+ for ((clm_idx=13;clm_idx<=16;clm_idx++)); do
+ wait ${rgr_pid[${clm_idx}]}
+ if [ $? -ne 0 ]; then
+ printf "${spt_nm}: ERROR seasonal regrid cmd_rgr[${clm_idx}] failed. Debug this:\n${cmd_rgr[${clm_idx}]}\n"
+ exit 1
+ fi # !err
+ done # !clm_idx
+ fi # !par_opt
+ fi # !rgr_map
+ # wait() for annual climatology to finish
+ if [ -n "${par_opt}" ]; then
+ wait ${clm_pid[17]}
+ if [ $? -ne 0 ]; then
+ printf "${spt_nm}: ERROR annual climo cmd_clm[17] failed. Debug this:\n${cmd_clm[17]}\n"
+ exit 1
+ fi # !err
+ fi # !par_opt
-# Incremental climos
-if [ -n "${yr_srt_prv}" ]; then
+ # Block 5: Regrid climatological annual mean
+ if [ -n "${rgr_map}" ]; then
+ printf "Regrid annual data...\n"
+ for ((clm_idx=17;clm_idx<=17;clm_idx++)); do
+ cmd_rgr[${clm_idx}]="${cmd_mpi[${clm_idx}]} ncks -t ${thr_nbr} -O ${nco_opt} ${rgr_opt} ${fl_out[${clm_idx}]} ${fl_rgr[${clm_idx}]}"
+ if [ "${mdl_typ}" = 'mpas' ]; then
+ cmd_rgr[${clm_idx}]="${cmd_mpi[${clm_idx}]} ncremap -C -u .pid${spt_pid}.climo.${clm_idx}.tmp -P mpas -t ${thr_nbr} -m ${rgr_map} -i ${fl_out[${clm_idx}]} -o ${fl_rgr[${clm_idx}]}"
+ fi # !mdl_typ
+ if [ ${dbg_lvl} -ge 1 ]; then
+ echo ${cmd_rgr[${clm_idx}]}
+ fi # !dbg
+ if [ ${dbg_lvl} -le 1 ]; then
+ # NB: Do not background climatological mean regridding
+ eval ${cmd_rgr[${clm_idx}]}
+ if [ $? -ne 0 ]; then
+ printf "${spt_nm}: ERROR annual regrid cmd_rgr[${clm_idx}] failed. Debug this:\n${cmd_rgr[${clm_idx}]}\n"
+ exit 1
+ fi # !err
+ fi # !dbg
+ done
+ fi # !rgr_map
+
+ # Link ACME-climo to AMWG-climo filenames
+ # drc_pwd is always fully qualified path but drc_out and drc_rgr may be relative paths
+ # Strategy: Start in drc_pwd, cd to drc_rgr, then link so return code comes from ln not cd
+ if [ ${lnk_flg} = 'Yes' ]; then
+ printf "Link ACME-climo to AMWG-climo filenames...\n"
+ for ((clm_idx=1;clm_idx<=17;clm_idx++)); do
+ if [ -n "${rgr_map}" ]; then
+ cmd_lnk[${clm_idx}]="cd ${drc_pwd};cd ${drc_rgr};ln -s -f ${fl_rgr[${clm_idx}]/${drc_rgr}\//} ${fl_amwg[${clm_idx}]/${drc_rgr}\//}"
+ else
+ cmd_lnk[${clm_idx}]="cd ${drc_pwd};cd ${drc_out};ln -s -f ${fl_out[${clm_idx}]/${drc_out}\//} ${fl_amwg[${clm_idx}]/${drc_out}\//}"
+ fi # !rgr_map
+ if [ ${dbg_lvl} -ge 1 ]; then
+ echo ${cmd_lnk[${clm_idx}]}
+ fi # !dbg
+ if [ ${dbg_lvl} -le 1 ]; then
+ eval ${cmd_lnk[${clm_idx}]}
+ if [ $? -ne 0 ]; then
+ printf "${spt_nm}: ERROR linking ACME to AMWG filename cmd_lnk[${clm_idx}] failed. Debug this:\n${cmd_lnk[${clm_idx}]}\n"
+ exit 1
+ fi # !err
+ fi # !dbg
+ done # !clm_idx
+ cd ${drc_pwd}
+ fi # !lnk_flg
+fi # !clm_md !bnr_flg
+
+# Extended climos
+if [ "${xtn_flg}" = 'Yes' ]; then
mkdir -p ${drc_prv}
mkdir -p ${drc_xtn}
trim_leading_zeros ${yr_srt_prv}
yr_srt_rth_prv=${sng_trm}
yyyy_srt_prv=`printf "%04d" ${yr_srt_rth_prv}`
+ yyyy_clm_srt_dec_prv=${yyyy_srt_prv}
let yr_srtm1_prv=${yr_srt_rth_prv}-1
- let yr_end_prv=${yr_srt_rth}-1
+ if [ "${ncr_flg}" = 'Yes' ]; then
+ let yr_end_prv=${yr_srt_rth}-1
+ fi # !ncr_flg
trim_leading_zeros ${yr_end_prv}
yr_end_rth_prv=${sng_trm}
yyyy_end_prv=`printf "%04d" ${yr_end_rth_prv}`
let yr_endm1_prv=${yr_end_rth_prv}-1
let yr_nbr_prv=${yr_end_rth_prv}-${yr_srt_rth_prv}+1
- let yr_nbr_xtn=${yr_end_rth}-${yr_srt_rth_prv}+1
- yyyy_clm_srt_dec_prv=${yyyy_srt_prv}
+ let yr_nbr_xtn=${yr_nbr_prv}+${yr_nbr}
+
wgt_prv=$(echo "${yr_nbr_prv}/${yr_nbr_xtn}" | bc -l)
wgt_crr=$(echo "${yr_nbr}/${yr_nbr_xtn}" | bc -l)
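+  # Worked example (numbers hypothetical): extending a 10-year previous climo with a new
+  # 5-year climo gives yr_nbr_prv=10, yr_nbr=5, yr_nbr_xtn=15, hence
+  #   wgt_prv = 10/15 ~ 0.667 and wgt_crr = 5/15 ~ 0.333
+  # which ncflint -w ${wgt_prv},${wgt_crr} applies below to form the 15-year weighted average.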
- printf "Incrementally produce extended climatology as weighted average of previous and current climatologies:\n"
- printf "Previous climatology is ${yr_nbr_prv} years from ${yyyy_clm_srt_dec_prv}${mm_ann_srt} to ${yyyy_end_prv}${mm_ann_end}, weight = ${wgt_prv}\n"
- printf "Current climatology is ${yr_nbr} years from ${yyyy_clm_srt_dec}${mm_ann_srt} to ${yyyy_end}${mm_ann_end}, weight = ${wgt_crr}\n"
- printf "Extended climatology is ${yr_nbr_xtn} years from ${yyyy_clm_srt_dec_prv}${mm_ann_srt} to ${yyyy_end}${mm_ann_end}\n"
-
+ if [ "${bnr_flg}" = 'Yes' ]; then
+ printf "Produce extended climatology as weighted average of two previously computed climatologies:\n"
+ else # !bnr_flg
+ printf "Produce extended climatology as weighted average of previously computed and incremental/new climatologies:\n"
+ fi # !bnr_flg
+
# Replace yr_srt by yr_srt_prv in "yrs_averaged" attribute
nco_opt="${nco_opt/${yr_srt}-/${yr_srt_prv}-}"
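+  # Sketch of the substitution above, assuming nco_opt carries a yrs_averaged=YYYY-YYYY style
+  # attribute as the comment suggests (values hypothetical):
+  #   yr_srt=6 ; yr_srt_prv=1 ; nco_opt='... yrs_averaged=6-10 ...'
+  #   ${nco_opt/${yr_srt}-/${yr_srt_prv}-}   # -> '... yrs_averaged=1-10 ...'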
-
- clm_idx=0
- for mth in {01..12}; do
- let clm_idx=${clm_idx}+1
- MM=`printf "%02d" ${clm_idx}`
- fl_prv[${clm_idx}]="${drc_prv}/${out_nm}_${MM}_${yyyy_srt_prv}${MM}_${yyyy_end_prv}${MM}_climo.nc"
- fl_xtn[${clm_idx}]="${drc_xtn}/${out_nm}_${MM}_${yyyy_srt_prv}${MM}_${yyyy_end}${MM}_climo.nc"
- done # !mth
- if [ ${clm_md} = 'scd' ]; then
- yyyy_clm_srt_dec_prv=`printf "%04d" ${yr_srtm1_prv}`
- yyyy_clm_end_dec_prv=`printf "%04d" ${yr_endm1_prv}`
- clm_idx=12
- MM=`printf "%02d" ${clm_idx}`
- fl_prv[${clm_idx}]="${drc_prv}/${out_nm}_${MM}_${yyyy_clm_srt_dec_prv}${MM}_${yyyy_clm_end_dec_prv}${MM}_climo.nc"
- fl_xtn[${clm_idx}]="${drc_xtn}/${out_nm}_${MM}_${yyyy_clm_srt_dec_prv}${MM}_${yyyy_clm_end_dec}${MM}_climo.nc"
- fi # !scd
-
- fl_prv[13]="${drc_prv}/${out_nm}_MAM_${yyyy_srt_prv}03_${yyyy_end_prv}05_climo.nc"
- fl_prv[14]="${drc_prv}/${out_nm}_JJA_${yyyy_srt_prv}06_${yyyy_end_prv}08_climo.nc"
- fl_prv[15]="${drc_prv}/${out_nm}_SON_${yyyy_srt_prv}09_${yyyy_end_prv}11_climo.nc"
- fl_prv[16]="${drc_prv}/${out_nm}_DJF_${yyyy_clm_srt_dec_prv}${mm_djf_srt}_${yyyy_end_prv}${mm_djf_end}_climo.nc"
- fl_prv[17]="${drc_prv}/${out_nm}_ANN_${yyyy_clm_srt_dec_prv}${mm_ann_srt}_${yyyy_end_prv}${mm_ann_end}_climo.nc"
- fl_xtn[13]="${drc_xtn}/${out_nm}_MAM_${yyyy_srt_prv}03_${yyyy_end}05_climo.nc"
- fl_xtn[14]="${drc_xtn}/${out_nm}_JJA_${yyyy_srt_prv}06_${yyyy_end}08_climo.nc"
- fl_xtn[15]="${drc_xtn}/${out_nm}_SON_${yyyy_srt_prv}09_${yyyy_end}11_climo.nc"
- fl_xtn[16]="${drc_xtn}/${out_nm}_DJF_${yyyy_clm_srt_dec_prv}${mm_djf_srt}_${yyyy_end}${mm_djf_end}_climo.nc"
- fl_xtn[17]="${drc_xtn}/${out_nm}_ANN_${yyyy_clm_srt_dec_prv}${mm_ann_srt}_${yyyy_end}${mm_ann_end}_climo.nc"
+ if [ "${clm_md}" = 'ann' ]; then
+ printf "Previous/first climatology is ${yr_nbr_prv} years from ${yyyy_srt_prv} to ${yyyy_end_prv}, weight = ${wgt_prv}\n"
+ printf "Current/second climatology is ${yr_nbr} years from ${yyyy_srt} to ${yyyy_end}, weight = ${wgt_crr}\n"
+ printf "Extended climatology is ${yr_nbr_xtn} years from ${yyyy_srt_prv} to ${yyyy_end}\n"
+ fi # !clm_md
- # Derive all seventeen regridded and AMWG names from output names
- for ((clm_idx=1;clm_idx<=17;clm_idx++)); do
- fl_rgr_prv[${clm_idx}]="${fl_rgr[${clm_idx}]/${drc_rgr}/${drc_rgr_prv}}"
- fl_rgr_prv[${clm_idx}]="${fl_rgr_prv[${clm_idx}]/_${yyyy_srt}/_${yyyy_srt_prv}}"
- fl_rgr_prv[${clm_idx}]="${fl_rgr_prv[${clm_idx}]/_${yyyy_end}/_${yyyy_end_prv}}"
+ if [ "${clm_md}" = 'mth' ]; then
- fl_rgr_xtn[${clm_idx}]="${fl_rgr[${clm_idx}]/${drc_rgr}/${drc_rgr_xtn}}"
- fl_rgr_xtn[${clm_idx}]="${fl_rgr_xtn[${clm_idx}]/_${yyyy_srt}/_${yyyy_srt_prv}}"
-
- fl_amwg_xtn[${clm_idx}]=`expr match "${fl_xtn[${clm_idx}]}" '\(.*\)_.*_.*_climo.nc'` # Prune _YYYYYMM_YYYYMM_climo.nc
- fl_amwg_xtn[${clm_idx}]="${fl_amwg[${clm_idx}]}_climo.nc" # Replace with _climo.nc
- fl_amwg_xtn[${clm_idx}]="${fl_amwg[${clm_idx}]/${drc_xtn}\//}" # Delete prepended path to ease symlinking
- if [ ${clm_md} = 'scd' ] ; then
- # Handle Dec, DJF, and ANN
- if [ ${clm_idx} -eq 12 ] || [ ${clm_idx} -eq 16 ] || [ ${clm_idx} -eq 17 ] ; then
- fl_rgr_prv[${clm_idx}]="${fl_rgr[${clm_idx}]/${drc_rgr}/${drc_rgr_prv}}"
- fl_rgr_prv[${clm_idx}]="${fl_rgr_prv[${clm_idx}]/_${yyyy_clm_srt_dec}/_${yyyy_clm_srt_dec_prv}}"
- if [ ${clm_idx} -eq 12 ] ; then
- fl_rgr_prv[${clm_idx}]="${fl_rgr_prv[${clm_idx}]/_${yyyy_clm_end_dec}/_${yyyy_clm_end_dec_prv}}"
- else
- fl_rgr_prv[${clm_idx}]="${fl_rgr_prv[${clm_idx}]/_${yyyy_end}/_${yyyy_end_prv}}"
- fi # !Dec
-
- fl_rgr_xtn[${clm_idx}]="${fl_rgr[${clm_idx}]/${drc_rgr}/${drc_rgr_xtn}}"
- fl_rgr_xtn[${clm_idx}]="${fl_rgr_xtn[${clm_idx}]/_${yyyy_clm_srt_dec}/_${yyyy_clm_srt_dec_prv}}"
- fi # !Dec, DJF, ANN
- fi # !clm_md
- done # !clm_idx
-
- printf "Weight previous and new native-grid climos to produce extended climo...\n"
- for ((clm_idx=1;clm_idx<=17;clm_idx++)); do
- cmd_xtn[${clm_idx}]="${cmd_mpi[${clm_idx}]} ncflint -O ${nco_opt} -w ${wgt_prv},${wgt_crr} ${fl_prv[${clm_idx}]} ${fl_out[${clm_idx}]} ${fl_xtn[${clm_idx}]}"
- if [ ${dbg_lvl} -ge 1 ]; then
- echo ${cmd_xtn[${clm_idx}]}
- fi # !dbg
- if [ ${dbg_lvl} -le 1 ]; then
- if [ -z "${par_opt}" ]; then
- eval ${cmd_xtn[${clm_idx}]}
- if [ $? -ne 0 ]; then
- printf "${spt_nm}: ERROR extended climo cmd_xtn[${clm_idx}] failed. Debug this:\n${cmd_xtn[${clm_idx}]}\n"
- exit 1
- fi # !err
- else # !par_opt
- eval ${cmd_xtn[${clm_idx}]} ${par_opt} # eval always returns 0 on backgrounded processes
- xtn_pid[${clm_idx}]=$!
- fi # !par_opt
- fi # !dbg
- done # !clm_idx
- if [ -n "${par_opt}" ]; then
+ printf "Previous/first climatology is ${yr_nbr_prv} years from ${yyyy_clm_srt_dec_prv}${mm_ann_srt} to ${yyyy_end_prv}${mm_ann_end}, weight = ${wgt_prv}\n"
+ printf "Current/second climatology is ${yr_nbr} years from ${yyyy_clm_srt_dec}${mm_ann_srt} to ${yyyy_end}${mm_ann_end}, weight = ${wgt_crr}\n"
+ printf "Extended climatology is ${yr_nbr_xtn} years from ${yyyy_clm_srt_dec_prv}${mm_ann_srt} to ${yyyy_end}${mm_ann_end}\n"
+
+ clm_idx=0
+ for mth in {01..12}; do
+ let clm_idx=${clm_idx}+1
+ MM=`printf "%02d" ${clm_idx}`
+ fl_prv[${clm_idx}]="${drc_prv}/${out_nm}_${MM}_${yyyy_srt_prv}${MM}_${yyyy_end_prv}${MM}_climo.nc"
+ fl_xtn[${clm_idx}]="${drc_xtn}/${out_nm}_${MM}_${yyyy_srt_prv}${MM}_${yyyy_end}${MM}_climo.nc"
+ done # !mth
+ if [ ${dec_md} = 'scd' ]; then
+ yyyy_clm_srt_dec_prv=`printf "%04d" ${yr_srtm1_prv}`
+ yyyy_clm_end_dec_prv=`printf "%04d" ${yr_endm1_prv}`
+ clm_idx=12
+ MM=`printf "%02d" ${clm_idx}`
+ fl_prv[${clm_idx}]="${drc_prv}/${out_nm}_${MM}_${yyyy_clm_srt_dec_prv}${MM}_${yyyy_clm_end_dec_prv}${MM}_climo.nc"
+ fl_xtn[${clm_idx}]="${drc_xtn}/${out_nm}_${MM}_${yyyy_clm_srt_dec_prv}${MM}_${yyyy_clm_end_dec}${MM}_climo.nc"
+ fi # !scd
+
+ fl_prv[13]="${drc_prv}/${out_nm}_MAM_${yyyy_srt_prv}03_${yyyy_end_prv}05_climo.nc"
+ fl_prv[14]="${drc_prv}/${out_nm}_JJA_${yyyy_srt_prv}06_${yyyy_end_prv}08_climo.nc"
+ fl_prv[15]="${drc_prv}/${out_nm}_SON_${yyyy_srt_prv}09_${yyyy_end_prv}11_climo.nc"
+ fl_prv[16]="${drc_prv}/${out_nm}_DJF_${yyyy_clm_srt_dec_prv}${mm_djf_srt}_${yyyy_end_prv}${mm_djf_end}_climo.nc"
+ fl_prv[17]="${drc_prv}/${out_nm}_ANN_${yyyy_clm_srt_dec_prv}${mm_ann_srt}_${yyyy_end_prv}${mm_ann_end}_climo.nc"
+
+ fl_xtn[13]="${drc_xtn}/${out_nm}_MAM_${yyyy_srt_prv}03_${yyyy_end}05_climo.nc"
+ fl_xtn[14]="${drc_xtn}/${out_nm}_JJA_${yyyy_srt_prv}06_${yyyy_end}08_climo.nc"
+ fl_xtn[15]="${drc_xtn}/${out_nm}_SON_${yyyy_srt_prv}09_${yyyy_end}11_climo.nc"
+ fl_xtn[16]="${drc_xtn}/${out_nm}_DJF_${yyyy_clm_srt_dec_prv}${mm_djf_srt}_${yyyy_end}${mm_djf_end}_climo.nc"
+ fl_xtn[17]="${drc_xtn}/${out_nm}_ANN_${yyyy_clm_srt_dec_prv}${mm_ann_srt}_${yyyy_end}${mm_ann_end}_climo.nc"
+
+ # Derive all seventeen regridded and AMWG names from output names
for ((clm_idx=1;clm_idx<=17;clm_idx++)); do
- wait ${xtn_pid[${clm_idx}]}
- if [ $? -ne 0 ]; then
- printf "${spt_nm}: ERROR extended climo cmd_xtn[${clm_idx}] failed. Debug this:\n${cmd_xtn[${clm_idx}]}\n"
- exit 1
- fi # !err
+ fl_rgr_prv[${clm_idx}]="${fl_rgr[${clm_idx}]/${drc_rgr}/${drc_rgr_prv}}"
+ fl_rgr_prv[${clm_idx}]="${fl_rgr_prv[${clm_idx}]/_${yyyy_srt}/_${yyyy_srt_prv}}"
+ fl_rgr_prv[${clm_idx}]="${fl_rgr_prv[${clm_idx}]/_${yyyy_end}/_${yyyy_end_prv}}"
+
+ fl_rgr_xtn[${clm_idx}]="${fl_rgr[${clm_idx}]/${drc_rgr}/${drc_rgr_xtn}}"
+ fl_rgr_xtn[${clm_idx}]="${fl_rgr_xtn[${clm_idx}]/_${yyyy_srt}/_${yyyy_srt_prv}}"
+
+ fl_amwg_xtn[${clm_idx}]=`expr match "${fl_xtn[${clm_idx}]}" '\(.*\)_.*_.*_climo.nc'` # Prune _YYYYMM_YYYYMM_climo.nc
+ fl_amwg_xtn[${clm_idx}]="${fl_amwg_xtn[${clm_idx}]}_climo.nc" # Replace with _climo.nc
+ fl_amwg_xtn[${clm_idx}]="${fl_amwg_xtn[${clm_idx}]/${drc_xtn}\//}" # Delete prepended path to ease symlinking
+ if [ ${dec_md} = 'scd' ] ; then
+ # Handle Dec, DJF, and ANN
+ if [ ${clm_idx} -eq 12 ] || [ ${clm_idx} -eq 16 ] || [ ${clm_idx} -eq 17 ] ; then
+ fl_rgr_prv[${clm_idx}]="${fl_rgr[${clm_idx}]/${drc_rgr}/${drc_rgr_prv}}"
+ fl_rgr_prv[${clm_idx}]="${fl_rgr_prv[${clm_idx}]/_${yyyy_clm_srt_dec}/_${yyyy_clm_srt_dec_prv}}"
+ if [ ${clm_idx} -eq 12 ] ; then
+ fl_rgr_prv[${clm_idx}]="${fl_rgr_prv[${clm_idx}]/_${yyyy_clm_end_dec}/_${yyyy_clm_end_dec_prv}}"
+ else
+ fl_rgr_prv[${clm_idx}]="${fl_rgr_prv[${clm_idx}]/_${yyyy_end}/_${yyyy_end_prv}}"
+ fi # !Dec
+
+ fl_rgr_xtn[${clm_idx}]="${fl_rgr[${clm_idx}]/${drc_rgr}/${drc_rgr_xtn}}"
+ fl_rgr_xtn[${clm_idx}]="${fl_rgr_xtn[${clm_idx}]/_${yyyy_clm_srt_dec}/_${yyyy_clm_srt_dec_prv}}"
+ fi # !Dec, DJF, ANN
+ fi # !dec_md
done # !clm_idx
- fi # !par_opt
- wait
-
- if [ -n "${rgr_map}" ]; then
- printf "Weight previous and new regridded climos to produce extended regridded climo...\n"
+
+ printf "Weight input climos to produce extended climo...\n"
for ((clm_idx=1;clm_idx<=17;clm_idx++)); do
- cmd_rgr_xtn[${clm_idx}]="${cmd_mpi[${clm_idx}]} ncflint -O ${nco_opt} -w ${wgt_prv},${wgt_crr} ${fl_rgr_prv[${clm_idx}]} ${fl_rgr[${clm_idx}]} ${fl_rgr_xtn[${clm_idx}]}"
+ cmd_xtn[${clm_idx}]="${cmd_mpi[${clm_idx}]} ncflint -O ${nco_opt} -w ${wgt_prv},${wgt_crr} ${fl_prv[${clm_idx}]} ${fl_out[${clm_idx}]} ${fl_xtn[${clm_idx}]}"
if [ ${dbg_lvl} -ge 1 ]; then
- echo ${cmd_rgr_xtn[${clm_idx}]}
+ echo ${cmd_xtn[${clm_idx}]}
fi # !dbg
if [ ${dbg_lvl} -le 1 ]; then
if [ -z "${par_opt}" ]; then
- eval ${cmd_rgr_xtn[${clm_idx}]}
+ eval ${cmd_xtn[${clm_idx}]}
if [ $? -ne 0 ]; then
- printf "${spt_nm}: ERROR extended climo cmd_rgr_xtn[${clm_idx}] failed. Debug this:\n${cmd_rgr_xtn[${clm_idx}]}\n"
+ printf "${spt_nm}: ERROR extended climo cmd_xtn[${clm_idx}] failed. Debug this:\n${cmd_xtn[${clm_idx}]}\n"
exit 1
fi # !err
else # !par_opt
- eval ${cmd_rgr_xtn[${clm_idx}]} ${par_opt} # eval always returns 0 on backgrounded processes
- rgr_xtn_pid[${clm_idx}]=$!
+ eval ${cmd_xtn[${clm_idx}]} ${par_opt} # eval always returns 0 on backgrounded processes
+ xtn_pid[${clm_idx}]=$!
fi # !par_opt
fi # !dbg
done # !clm_idx
if [ -n "${par_opt}" ]; then
for ((clm_idx=1;clm_idx<=17;clm_idx++)); do
- wait ${rgr_xtn_pid[${clm_idx}]}
+ wait ${xtn_pid[${clm_idx}]}
if [ $? -ne 0 ]; then
- printf "${spt_nm}: ERROR extended climo cmd_rgr_xtn[${clm_idx}] failed. Debug this:\n${cmd_rgr_xtn[${clm_idx}]}\n"
+ printf "${spt_nm}: ERROR extended climo cmd_xtn[${clm_idx}] failed. Debug this:\n${cmd_xtn[${clm_idx}]}\n"
exit 1
fi # !err
done # !clm_idx
fi # !par_opt
wait
- fi # !rgr_map
+
+ if [ -n "${rgr_map}" ]; then
+ printf "Weight input climos to produce extended regridded climo...\n"
+ for ((clm_idx=1;clm_idx<=17;clm_idx++)); do
+ cmd_rgr_xtn[${clm_idx}]="${cmd_mpi[${clm_idx}]} ncflint -O ${nco_opt} -w ${wgt_prv},${wgt_crr} ${fl_rgr_prv[${clm_idx}]} ${fl_rgr[${clm_idx}]} ${fl_rgr_xtn[${clm_idx}]}"
+ if [ ${dbg_lvl} -ge 1 ]; then
+ echo ${cmd_rgr_xtn[${clm_idx}]}
+ fi # !dbg
+ if [ ${dbg_lvl} -le 1 ]; then
+ if [ -z "${par_opt}" ]; then
+ eval ${cmd_rgr_xtn[${clm_idx}]}
+ if [ $? -ne 0 ]; then
+ printf "${spt_nm}: ERROR extended climo cmd_rgr_xtn[${clm_idx}] failed. Debug this:\n${cmd_rgr_xtn[${clm_idx}]}\n"
+ exit 1
+ fi # !err
+ else # !par_opt
+ eval ${cmd_rgr_xtn[${clm_idx}]} ${par_opt} # eval always returns 0 on backgrounded processes
+ rgr_xtn_pid[${clm_idx}]=$!
+ fi # !par_opt
+ fi # !dbg
+ done # !clm_idx
+ if [ -n "${par_opt}" ]; then
+ for ((clm_idx=1;clm_idx<=17;clm_idx++)); do
+ wait ${rgr_xtn_pid[${clm_idx}]}
+ if [ $? -ne 0 ]; then
+ printf "${spt_nm}: ERROR extended climo cmd_rgr_xtn[${clm_idx}] failed. Debug this:\n${cmd_rgr_xtn[${clm_idx}]}\n"
+ exit 1
+ fi # !err
+ done # !clm_idx
+ fi # !par_opt
+ wait
+ fi # !rgr_map
+
+ # Link ACME-climo to AMWG-climo filenames
+ # drc_pwd is always fully qualified path but drc_out and drc_rgr may be relative paths
+ # Strategy: Start in drc_pwd, cd to drc_rgr, then link so return code comes from ln not cd
+ if [ ${lnk_flg} = 'Yes' ]; then
+ printf "Link extended ACME-climo to AMWG-climo filenames...\n"
+ for ((clm_idx=1;clm_idx<=17;clm_idx++)); do
+ if [ -n "${rgr_map}" ]; then
+ cmd_lnk_xtn[${clm_idx}]="cd ${drc_pwd};cd ${drc_rgr_xtn};ln -s -f ${fl_rgr_xtn[${clm_idx}]/${drc_rgr_xtn}\//} ${fl_amwg[${clm_idx}]/${drc_rgr_xtn}\//}"
+ else
+ cmd_lnk_xtn[${clm_idx}]="cd ${drc_pwd};cd ${drc_xtn};ln -s -f ${fl_xtn[${clm_idx}]/${drc_xtn}\//} ${fl_amwg[${clm_idx}]/${drc_xtn}\//}"
+ fi # !rgr_map
+ if [ ${dbg_lvl} -ge 1 ]; then
+ echo ${cmd_lnk_xtn[${clm_idx}]}
+ fi # !dbg
+ if [ ${dbg_lvl} -le 1 ]; then
+ eval ${cmd_lnk_xtn[${clm_idx}]}
+ if [ $? -ne 0 ]; then
+ printf "${spt_nm}: ERROR linking ACME to AMWG filename cmd_lnk_xtn[${clm_idx}] failed. Debug this:\n${cmd_lnk_xtn[${clm_idx}]}\n"
+ exit 1
+ fi # !err
+ fi # !dbg
+ done # !clm_idx
+ cd ${drc_pwd}
+ fi # !lnk_flg
+ fi # !clm_md
- # Link ACME-climo to AMWG-climo filenames
- # drc_pwd is always fully qualified path but drc_out and drc_rgr may be relative paths
- # Strategy: Start in drc_pwd, cd to drc_rgr, then link so return code comes from ln not cd
- if [ ${lnk_flg} = 'Yes' ]; then
- printf "Link extended ACME-climo to AMWG-climo filenames...\n"
- for ((clm_idx=1;clm_idx<=17;clm_idx++)); do
- if [ -n "${rgr_map}" ]; then
- cmd_lnk_xtn[${clm_idx}]="cd ${drc_pwd};cd ${drc_rgr_xtn};ln -s -f ${fl_rgr_xtn[${clm_idx}]/${drc_rgr_xtn}\//} ${fl_amwg[${clm_idx}]/${drc_rgr_xtn}\//}"
- else
- cmd_lnk_xtn[${clm_idx}]="cd ${drc_pwd};cd ${drc_xtn};ln -s -f ${fl_xtn[${clm_idx}]/${drc_xtn}\//} ${fl_amwg[${clm_idx}]/${drc_xtn}\//}"
- fi # !rgr_map
- if [ ${dbg_lvl} -ge 1 ]; then
- echo ${cmd_lnk_xtn[${clm_idx}]}
- fi # !dbg
- if [ ${dbg_lvl} -le 1 ]; then
- eval ${cmd_lnk_xtn[${clm_idx}]}
- if [ $? -ne 0 ]; then
- printf "${spt_nm}: ERROR linking ACME to AMWG filename cmd_lnk_xtn[${clm_idx}] failed. Debug this:\n${cmd_lnk_xtn[${clm_idx}]}\n"
- exit 1
- fi # !err
- fi # !dbg
- done # !clm_idx
- cd ${drc_pwd}
- fi # !lnk_flg
-
-fi # !yr_srt_prv incremental climos
+else # !xtn_flg extended climos
+
+ yr_nbr_xtn=${yr_nbr}
+
+fi # !xtn_flg extended climos
date_end=$(date +"%s")
-printf "Completed climatology generation for dataset ${caseid} at `date`.\n"
+printf "Completed ${yr_nbr_xtn}-year climatology generation for dataset ${caseid} at `date`.\n"
date_dff=$((date_end-date_srt))
echo "Quick plots of climatological annual mean:"
if [ -n "${yr_srt_prv}" ]; then
if [ -n "${rgr_map}" ]; then
- echo "ncview ${fl_rgr_xtn[17]} &"
- echo "panoply ${fl_rgr_xtn[17]} &"
+ echo "ncview ${fl_rgr_xtn[${clm_nbr}]} &"
+ echo "panoply ${fl_rgr_xtn[${clm_nbr}]} &"
else
- echo "ncview ${fl_xtn[17]} &"
- echo "panoply ${fl_xtn[17]} &"
+ echo "ncview ${fl_xtn[${clm_nbr}]} &"
+ echo "panoply ${fl_xtn[${clm_nbr}]} &"
fi # !rgr_map
else
if [ -n "${rgr_map}" ]; then
- echo "ncview ${fl_rgr[17]} &"
- echo "panoply ${fl_rgr[17]} &"
+ echo "ncview ${fl_rgr[${clm_nbr}]} &"
+ echo "panoply ${fl_rgr[${clm_nbr}]} &"
else
- echo "ncview ${fl_out[17]} &"
- echo "panoply ${fl_out[17]} &"
+ echo "ncview ${fl_out[${clm_nbr}]} &"
+ echo "panoply ${fl_out[${clm_nbr}]} &"
fi # !rgr_map
fi # !yr_srt_prv
echo "Elapsed time $((date_dff/60))m$((date_dff % 60))s"
diff --git a/data/ncremap b/data/ncremap
index 58894df..6626009 100755
--- a/data/ncremap
+++ b/data/ncremap
@@ -54,6 +54,7 @@ while [ -h "${spt_src}" ]; do # Recursively resolve ${spt_src} until file is no
spt_src="$(readlink "${spt_src}")"
[[ ${spt_src} != /* ]] && spt_src="${drc_spt}/${spt_src}" # If ${spt_src} was relative symlink, resolve it relative to path where symlink file was located
done
+cmd_ln="${spt_src} ${@}"
drc_spt="$( cd -P "$( dirname "${spt_src}" )" && pwd )"
spt_nm=$(basename ${spt_src}) # [sng] Script name (Unlike $0, ${BASH_SOURCE[0]} works well with 'source <script>')
spt_pid=$$ # [nbr] Script PID (process ID)
@@ -215,9 +216,17 @@ nco_vrs=$(ncks --version 2>&1 >/dev/null | grep NCO | awk '{print $5}')
# When running in a terminal window (not in a non-interactive batch queue)...
if [ -n "${TERM}" ]; then
# Set fonts for legibility
- fnt_nrm=`tput sgr0` # Normal
- fnt_bld=`tput bold` # Bold
- fnt_rvr=`tput smso` # Reverse
+ if [ -x /usr/bin/tput ] && tput setaf 1 &> /dev/null; then
+ fnt_bld=`tput bold` # Bold
+ fnt_nrm=`tput sgr0` # Normal
+ fnt_rvr=`tput smso` # Reverse
+ fnt_tlc=`tput sitm` # Italic
+ else
+ fnt_bld="\e[1m" # Bold
+ fnt_nrm="\e[0m" # Normal
+ fnt_rvr="\e[07m" # Reverse
+ fnt_tlc="\e[3m" # Italic
+ fi # !tput
fi # !TERM
# Defaults for command-line options and some derived variables
@@ -237,14 +246,14 @@ dst_xmp='dst.nc' # [sng] Destination file for examples
#esmf_opt='--ignore_unmapped --netcdf4' # [sng] ESMF_RegridWeightGen options
esmf_opt='--ignore_unmapped' # [sng] ESMF_RegridWeightGen options
fl_nbr=0 # [nbr] Number of files to remap
-gaa_sng="--gaa remap_script=${spt_nm} --gaa remap_hostname=${HOSTNAME} --gaa remap_version=${nco_vrs}" # [sng] Global attributes to add
+gaa_sng="--gaa remap_script=${spt_nm} --gaa remap_command=\"'${cmd_ln}'\" --gaa remap_hostname=${HOSTNAME} --gaa remap_version=${nco_vrs}" # [sng] Global attributes to add
grd_dst='' # [sng] Destination grid-file
grd_dst_glb="${DATA}/grids/180x360_SCRIP.20150901.nc" # [sng] Grid-file (destination) global
grd_dst_xmp='grd_dst.nc' # [sng] Destination grid-file for examples
grd_sng='' # [sng] Grid string
grd_src='' # [sng] Source grid-file
grd_src_xmp='grd_src.nc' # [sng] Source grid-file for examples
-hdr_pad='1000' # [B] Pad at end of header section
+hdr_pad='10000' # [B] Pad at end of header section
in_fl='' # [sng] Input file
#in_fl='AIRS.2014.10.01.202.L2.TSurfStd.Regrid010.1DLatLon.hole.nc' # [sng] Input file
in_xmp='in.nc' # [sng] Input file for examples
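
Two of the defaults above work together: gaa_sng now records the full ncremap invocation (captured earlier in cmd_ln) as a global attribute of every output file, and hdr_pad grows from 1000 B to 10000 B so the extra metadata fits in the netCDF header without forcing a file rewrite. A minimal sketch of passing such options to an NCO operator directly, assuming --gaa (global attribute add) and --hdr_pad behave as in recent NCO releases; filenames are illustrative:

  cmd_ln="ncremap -i in.nc -d dst.nc -o out.nc" # Hypothetical command line to record
  ncks -O --hdr_pad 10000 \
       --gaa remap_command="${cmd_ln}" \
       --gaa remap_hostname=${HOSTNAME} \
       in.nc out.nc

In the script itself the quoting is more involved, presumably because gaa_sng is later expanded inside an eval'd command string.
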
@@ -287,35 +296,35 @@ fi # !gpfs
function fnc_usg_prn { # NB: dash supports fnc_nm (){} syntax, not function fnc_nm{} syntax
# Print usage
- printf "\nComplete documentation for ${fnt_bld}${spt_nm}${fnt_nrm} at http://nco.sf.net/nco.html#ncremap\n\n"
- printf "${fnt_rvr}Basic usage:${fnt_nrm} ${fnt_bld}$spt_nm -i in_fl -d dst_fl -o out_fl${fnt_nrm}\n\n"
- echo "Command-line options:"
- echo "${fnt_rvr}-a${fnt_nrm} ${fnt_bld}alg_typ${fnt_nrm} Algorithm for ESMF weight generation (default ${fnt_bld}${alg_typ}${fnt_nrm})"
- echo "${fnt_rvr}-d${fnt_nrm} ${fnt_bld}dst_fl${fnt_nrm} Data file to infer destination grid from (empty means none, i.e., use grd_fl, grd_sng or map_fl)) (default ${fnt_bld}${dst_fl}${fnt_nrm})"
- echo "${fnt_rvr}-D${fnt_nrm} ${fnt_bld}dbg_lvl${fnt_nrm} Debugging level (default ${fnt_bld}${dbg_lvl}${fnt_nrm})"
- echo "${fnt_rvr}-E${fnt_nrm} ${fnt_bld}esmf_opt${fnt_nrm} ESMF ESMF_RegridWeightGen options (default ${fnt_bld}${esmf_opt}${fnt_nrm})"
- echo "${fnt_rvr}-g${fnt_nrm} ${fnt_bld}grd_dst${fnt_nrm} Grid-file (destination) (empty means none, i.e., infer from dst_fl or use grd_sng or map_fl) (default ${fnt_bld}${grd_dst}${fnt_nrm})"
- echo "${fnt_rvr}-G${fnt_nrm} ${fnt_bld}grd_sng${fnt_nrm} Grid generation argument string (empty means none) (default ${fnt_bld}${grd_sng}${fnt_nrm})"
- echo "${fnt_rvr}-I${fnt_nrm} ${fnt_bld}drc_in${fnt_nrm} Input directory (empty means none) (default ${fnt_bld}${drc_in}${fnt_nrm})"
- echo "${fnt_rvr}-i${fnt_nrm} ${fnt_bld}in_fl${fnt_nrm} Input file (empty means use stdin or drc_in) (default ${fnt_bld}${in_fl}${fnt_nrm})"
- echo "${fnt_rvr}-j${fnt_nrm} ${fnt_bld}job_nbr${fnt_nrm} Job simultaneity for parallelism (default ${fnt_bld}${job_nbr}${fnt_nrm})"
- echo "${fnt_rvr}-M${fnt_nrm} Multi-map-file toggle (unset means generate one map-file per input file)"
- echo "${fnt_rvr}-m${fnt_nrm} ${fnt_bld}map_fl${fnt_nrm} Map-file (empty means generate internally) (default ${fnt_bld}${map_fl}${fnt_nrm})"
- echo "${fnt_rvr}-n${fnt_nrm} ${fnt_bld}nco_opt${fnt_nrm} NCO options (empty means none) (default ${fnt_bld}${nco_opt}${fnt_nrm})"
- echo "${fnt_rvr}-O${fnt_nrm} ${fnt_bld}drc_out${fnt_nrm} Output directory (default ${fnt_bld}${drc_out}${fnt_nrm})"
- echo "${fnt_rvr}-o${fnt_nrm} ${fnt_bld}out_fl${fnt_nrm} Output-file (regridded file) (empty copies Input filename) (default ${fnt_bld}${out_fl}${fnt_nrm})"
- echo "${fnt_rvr}-P${fnt_nrm} ${fnt_bld}pdq_typ${fnt_nrm} Permutation type (empty means none) (default ${fnt_bld}${pdq_typ}${fnt_nrm})"
- echo "${fnt_rvr}-p${fnt_nrm} ${fnt_bld}par_typ${fnt_nrm} Parallelism type (default ${fnt_bld}${par_typ}${fnt_nrm})"
- echo "${fnt_rvr}-R${fnt_nrm} ${fnt_bld}rgr_opt${fnt_nrm} Regridding options (empty means none) (default ${fnt_bld}${rgr_opt}${fnt_nrm})"
- echo "${fnt_rvr}-s${fnt_nrm} ${fnt_bld}grd_src${fnt_nrm} Grid-file (source) (empty means infer or use map_fl) (default ${fnt_bld}${grd_src}${fnt_nrm})"
- echo "${fnt_rvr}-T${fnt_nrm} ${fnt_bld}tps_opt${fnt_nrm} TempestRemap GenerateOfflineMap options (default ${fnt_bld}${tps_opt}${fnt_nrm})"
- echo "${fnt_rvr}-t${fnt_nrm} ${fnt_bld}thr_nbr${fnt_nrm} Thread number for regridder (default ${fnt_bld}${thr_nbr}${fnt_nrm})"
- echo "${fnt_rvr}-U${fnt_nrm} ${fnt_bld}drc_tmp${fnt_nrm} Temporary directory (for intermediate files) (default ${fnt_bld}${drc_tmp}${fnt_nrm})"
- echo "${fnt_rvr}-u${fnt_nrm} ${fnt_bld}unq_sfx${fnt_nrm} Unique suffix (prevents intermediate files from sharing names) (default ${fnt_bld}${unq_sfx}${fnt_nrm})"
- echo "${fnt_rvr}-V${fnt_nrm} ${fnt_bld}var_rgr${fnt_nrm} CF template variable (empty means none) (default ${fnt_bld}${var_rgr}${fnt_nrm})"
- echo "${fnt_rvr}-v${fnt_nrm} ${fnt_bld}var_lst${fnt_nrm} Variable list (empty means all) (default ${fnt_bld}${var_lst}${fnt_nrm})"
- echo "${fnt_rvr}-w${fnt_nrm} ${fnt_bld}wgt_gnr${fnt_nrm} Weight-generator (default ${fnt_bld}${wgt_gnr}${fnt_nrm})"
- echo "${fnt_rvr}-x${fnt_nrm} ${fnt_bld}xtn_var${fnt_nrm} Extensive variables (empty means none) (default ${fnt_bld}${xtn_var}${fnt_nrm})"
+ printf "${fnt_rvr}Basic usage:\n${fnt_nrm} ${fnt_bld}$spt_nm -i in_fl -d dst_fl -o out_fl${fnt_nrm}\n"
+ printf "${fnt_nrm} ${fnt_bld}${spt_nm} --input_file=in_fl --destination=dst_fl --output_file=out_fl${fnt_nrm}\n\n"
+ echo "Command-line options [long-option synonyms in ${fnt_tlc}italics${fnt_nrm}]:"
+ echo "${fnt_rvr}-a${fnt_nrm} ${fnt_bld}alg_typ${fnt_nrm} Algorithm for ESMF weight generation (default ${fnt_bld}${alg_typ}${fnt_nrm}) [${fnt_tlc}alg_typ, algorithm, regrid_algorithm${fnt_nrm}]"
+ echo "${fnt_rvr}-d${fnt_nrm} ${fnt_bld}dst_fl${fnt_nrm} Data file to infer destination grid from (empty means none, i.e., use grd_fl, grd_sng or map_fl) (default ${fnt_bld}${dst_fl}${fnt_nrm}) [${fnt_tlc}dst_fl, destination_file, template_file, template${fnt_nrm}]"
+ echo "${fnt_rvr}-D${fnt_nrm} ${fnt_bld}dbg_lvl${fnt_nrm} Debug level (default ${fnt_bld}${dbg_lvl}${fnt_nrm}) [${fnt_tlc}dbg_lvl, dbg, debug, debug_level${fnt_nrm}]"
+ echo "${fnt_rvr}-E${fnt_nrm} ${fnt_bld}esmf_opt${fnt_nrm} ESMF ESMF_RegridWeightGen options (default ${fnt_bld}${esmf_opt}${fnt_nrm}) [${fnt_tlc}esmf_opt, esmf, esmf_options${fnt_nrm}]"
+ echo "${fnt_rvr}-G${fnt_nrm} ${fnt_bld}grd_sng${fnt_nrm} Grid generation argument string (empty means none) (default ${fnt_bld}${grd_sng}${fnt_nrm}) [${fnt_tlc}grd_sng, grid_generation, grid_gen, grid_string${fnt_nrm}]"
+ echo "${fnt_rvr}-g${fnt_nrm} ${fnt_bld}grd_dst${fnt_nrm} Grid-file (destination) (empty means none, i.e., infer from dst_fl or use grd_sng or map_fl) (default ${fnt_bld}${grd_dst}${fnt_nrm}) [${fnt_tlc}grd_dst, grid_dest, dest_grid, destination_grid${fnt_nrm}]"
+ echo "${fnt_rvr}-I${fnt_nrm} ${fnt_bld}drc_in${fnt_nrm} Input directory (empty means none) (default ${fnt_bld}${drc_in}${fnt_nrm}) [${fnt_tlc}drc_in, in_drc, dir_in, in_dir, input${fnt_nrm}]"
+ echo "${fnt_rvr}-i${fnt_nrm} ${fnt_bld}in_fl${fnt_nrm} Input file (empty means use stdin or drc_in) (default ${fnt_bld}${in_fl}${fnt_nrm}) [${fnt_tlc}in_fl, in_file, input_file${fnt_nrm}]"
+ echo "${fnt_rvr}-j${fnt_nrm} ${fnt_bld}job_nbr${fnt_nrm} Job simultaneity for parallelism (default ${fnt_bld}${job_nbr}${fnt_nrm}) [${fnt_tlc}job_nbr, job_number, jobs${fnt_nrm}]"
+ echo "${fnt_rvr}-M${fnt_nrm} Multi-map-file toggle (unset means generate one map-file per input file) [${fnt_tlc}mlt_map, no_multimap${fnt_nrm}]"
+ echo "${fnt_rvr}-m${fnt_nrm} ${fnt_bld}map_fl${fnt_nrm} Map-file (empty means generate internally) (default ${fnt_bld}${map_fl}${fnt_nrm}) [${fnt_tlc}map_fl, map, map_file, rgr_map, regrid_map${fnt_nrm}]"
+ echo "${fnt_rvr}-n${fnt_nrm} ${fnt_bld}nco_opt${fnt_nrm} NCO options (empty means none) (default ${fnt_bld}${nco_opt}${fnt_nrm}) [${fnt_tlc}nco_opt, nco_options${fnt_nrm}]"
+ echo "${fnt_rvr}-O${fnt_nrm} ${fnt_bld}drc_out${fnt_nrm} Output directory (default ${fnt_bld}${drc_out}${fnt_nrm}) [${fnt_tlc}drc_out, out_drc, dir_out, out_dir, output${fnt_nrm}]"
+ echo "${fnt_rvr}-o${fnt_nrm} ${fnt_bld}out_fl${fnt_nrm} Output-file (regridded file) (empty copies Input filename) (default ${fnt_bld}${out_fl}${fnt_nrm}) [${fnt_tlc}out_fl, out_file, output_file${fnt_nrm}]"
+ echo "${fnt_rvr}-P${fnt_nrm} ${fnt_bld}pdq_typ${fnt_nrm} Permutation type (empty means none) (default ${fnt_bld}${pdq_typ}${fnt_nrm}) [${fnt_tlc}pdq_typ, prm_typ, permutation, permute${fnt_nrm}]"
+ echo "${fnt_rvr}-p${fnt_nrm} ${fnt_bld}par_typ${fnt_nrm} Parallelism type (default ${fnt_bld}${par_typ}${fnt_nrm}) [${fnt_tlc}par_typ, par_md, parallel_type, parallel_mode, parallel${fnt_nrm}]"
+ echo "${fnt_rvr}-R${fnt_nrm} ${fnt_bld}rgr_opt${fnt_nrm} Regrid options (empty means none) (default ${fnt_bld}${rgr_opt}${fnt_nrm}) [${fnt_tlc}rgr_opt, regrid_options${fnt_nrm}]"
+ echo "${fnt_rvr}-s${fnt_nrm} ${fnt_bld}grd_src${fnt_nrm} Grid-file (source) (empty means infer or use map_fl) (default ${fnt_bld}${grd_src}${fnt_nrm}) [${fnt_tlc}grd_src, grid_source, source_grid, src_grd${fnt_nrm}]"
+ echo "${fnt_rvr}-T${fnt_nrm} ${fnt_bld}tps_opt${fnt_nrm} TempestRemap GenerateOfflineMap options (default ${fnt_bld}${tps_opt}${fnt_nrm}) [${fnt_tlc}tps_opt, tempest_opt, tempest, tempest_options${fnt_nrm}]"
+ echo "${fnt_rvr}-t${fnt_nrm} ${fnt_bld}thr_nbr${fnt_nrm} Thread number for regridder (default ${fnt_bld}${thr_nbr}${fnt_nrm}) [${fnt_tlc}thr_nbr, thread_number, thread, threads${fnt_nrm}]"
+ echo "${fnt_rvr}-U${fnt_nrm} ${fnt_bld}drc_tmp${fnt_nrm} Temporary directory (for intermediate files) (default ${fnt_bld}${drc_tmp}${fnt_nrm}) [${fnt_tlc}drc_tmp, tmp_drc, dir_tmp, tmp_dir, tmp${fnt_nrm}]"
+ echo "${fnt_rvr}-u${fnt_nrm} ${fnt_bld}unq_sfx${fnt_nrm} Unique suffix (prevents intermediate files from sharing names) (default ${fnt_bld}${unq_sfx}${fnt_nrm}) [${fnt_tlc}unq_sfx, unique_suffix, suffix${fnt_nrm}]"
+ echo "${fnt_rvr}-V${fnt_nrm} ${fnt_bld}var_rgr${fnt_nrm} CF template variable (empty means none) (default ${fnt_bld}${var_rgr}${fnt_nrm}) [${fnt_tlc}var_rgr, rgr_var, var_cf, cf_var, cf_variable${fnt_nrm}]"
+ echo "${fnt_rvr}-v${fnt_nrm} ${fnt_bld}var_lst${fnt_nrm} Variable list (empty means all) (default ${fnt_bld}${var_lst}${fnt_nrm}) [${fnt_tlc}var_lst, variable_list, variable, variables${fnt_nrm}]"
+ echo "${fnt_rvr}-w${fnt_nrm} ${fnt_bld}wgt_gnr${fnt_nrm} Weight-generator (default ${fnt_bld}${wgt_gnr}${fnt_nrm}) [${fnt_tlc}wgt_gnr, weight_generator, generator${fnt_nrm}]"
+ echo "${fnt_rvr}-x${fnt_nrm} ${fnt_bld}xtn_var${fnt_nrm} Extensive variables (empty means none) (default ${fnt_bld}${xtn_var}${fnt_nrm}) [${fnt_tlc}xtn_var, xtn_lst, extensive, var_xtn, extensive_variables${fnt_nrm}]"
printf "\n"
printf "Examples: ${fnt_bld}$spt_nm -i ${in_xmp} -m ${map_xmp} -o ${out_xmp} ${fnt_nrm}\n"
printf " ${fnt_bld}$spt_nm -i ${in_xmp} -d ${dst_xmp} -o ${out_xmp} ${fnt_nrm}\n"
@@ -334,6 +343,7 @@ function fnc_usg_prn { # NB: dash supports fnc_nm (){} syntax, not function fnc_
printf " ${fnt_bld}$spt_nm -I ${drc_in_xmp} -g ${grd_dst_xmp} -O ${drc_out_xmp} ${fnt_nrm}\n"
printf " ${fnt_bld}ls mdl*2005*nc | $spt_nm -m ${map_xmp} -O ${drc_out_xmp} ${fnt_nrm}\n"
printf " ${fnt_bld}ls mdl*2005*nc | $spt_nm -d ${dst_xmp} -O ${drc_out_xmp} ${fnt_nrm}\n"
+ printf "\nComplete documentation at http://nco.sf.net/nco.html#${spt_nm}\n\n"
exit 1
} # end fnc_usg_prn()
@@ -357,38 +367,74 @@ fi # !arg_nbr
# Parse command-line options:
# http://stackoverflow.com/questions/402377/using-getopts-in-bash-shell-script-to-get-long-and-short-command-line-options
# http://tuxtweaks.com/2014/05/bash-getopts
-cmd_ln="${spt_nm} ${@}"
-while getopts :a:CD:d:E:f:g:G:h:I:i:j:Mm:n:O:o:P:p:R:s:T:t:U:u:V:v:w:x: OPT; do
+while getopts :a:CD:d:E:f:g:G:h:I:i:j:Mm:n:O:o:P:p:R:s:T:t:U:u:V:v:w:x:-: OPT; do
case ${OPT} in
- a) alg_typ=${OPTARG} ;; # Algorithm
+ a) alg_typ="${OPTARG}" ;; # Algorithm
C) clm_flg='Yes' ;; # Climo flag (undocumented)
- D) dbg_lvl=${OPTARG} ;; # Debugging level
- d) dst_fl=${OPTARG} ;; # Destination file
- E) esmf_opt=${OPTARG} ;; # ESMF options
- g) grd_dst=${OPTARG} ;; # Destination grid-file
- G) grd_sng=${OPTARG} ;; # Grid generation string
- I) drc_in=${OPTARG} ;; # Input directory
- i) in_fl=${OPTARG} ;; # Input file
- j) job_usr=${OPTARG} ;; # Job simultaneity
+ D) dbg_lvl="${OPTARG}" ;; # Debugging level
+ d) dst_fl="${OPTARG}" ;; # Destination file
+ E) esmf_opt="${OPTARG}" ;; # ESMF options
+ g) grd_dst="${OPTARG}" ;; # Destination grid-file
+ G) grd_sng="${OPTARG}" ;; # Grid generation string
+ I) drc_in="${OPTARG}" ;; # Input directory
+ i) in_fl="${OPTARG}" ;; # Input file
+ j) job_usr="${OPTARG}" ;; # Job simultaneity
M) mlt_map_flg='No' ;; # Multi-map flag
- m) map_fl=${OPTARG} ;; # Map-file
- n) nco_usr=${OPTARG} ;; # NCO options
- O) drc_usr=${OPTARG} ;; # Output directory
- o) out_fl=${OPTARG} ;; # Output file
- P) pdq_typ=${OPTARG} ;; # Permutation type
- p) par_typ=${OPTARG} ;; # Parallelism type
- R) rgr_opt=${OPTARG} ;; # Regridding options
- s) grd_src=${OPTARG} ;; # Source grid-file
- T) tps_opt=${OPTARG} ;; # Tempest options
- t) thr_usr=${OPTARG} ;; # Thread number
- U) tmp_usr=${OPTARG} ;; # Temporary directory
- u) unq_usr=${OPTARG} ;; # Unique suffix
- V) var_rgr=${OPTARG} ;; # CF template variable
- v) var_lst=${OPTARG} ;; # Variables
- w) wgt_usr=${OPTARG} ;; # Weight-generator
- x) xtn_var=${OPTARG} ;; # Extensive variables
+ m) map_fl="${OPTARG}" ;; # Map-file
+ n) nco_usr="${OPTARG}" ;; # NCO options
+ O) drc_usr="${OPTARG}" ;; # Output directory
+ o) out_fl="${OPTARG}" ;; # Output file
+ P) pdq_typ="${OPTARG}" ;; # Permutation type
+ p) par_typ="${OPTARG}" ;; # Parallelism type
+ R) rgr_opt="${OPTARG}" ;; # Regridding options
+ s) grd_src="${OPTARG}" ;; # Source grid-file
+ T) tps_opt="${OPTARG}" ;; # Tempest options
+ t) thr_usr="${OPTARG}" ;; # Thread number
+ U) tmp_usr="${OPTARG}" ;; # Temporary directory
+ u) unq_usr="${OPTARG}" ;; # Unique suffix
+ V) var_rgr="${OPTARG}" ;; # CF template variable
+ v) var_lst="${OPTARG}" ;; # Variables
+ w) wgt_usr="${OPTARG}" ;; # Weight-generator
+ x) xtn_var="${OPTARG}" ;; # Extensive variables
+ -) LONG_OPTARG="${OPTARG#*=}"
+ case ${OPTARG} in
+ # Hereafter ${OPTARG} is long argument key, and ${LONG_OPTARG}, if any, is long argument value
+ # Long options with no argument, no short option counterpart
+ # Long options with argument, no short option counterpart
+ # Long options with short counterparts, ordered by short option key
+ alg_typ=?* | algorithm=?* | regrid_algorithm=?* ) alg_typ="${LONG_OPTARG}" ;; # -a # Algorithm
+ clm_flg=?* | climatology_flag=?* ) clm_flg='Yes' ;; # -C # Climo flag (undocumented)
+ dbg_lvl=?* | dbg=?* | debug=?* | debug_level=?* ) dbg_lvl="${LONG_OPTARG}" ;; # -D # Debugging level
+ dst_fl=?* | destination_file=?* | template_file=?* | template=?* ) dst_fl="${LONG_OPTARG}" ;; # -d # Destination file
+ esmf_opt=?* | esmf=?* | esmf_options=?* ) esmf_opt="${LONG_OPTARG}" ;; # -E # ESMF options
+ grd_dst=?* | grid_dest=?* | dest_grid=?* | destination_grid=?* ) grd_dst="${LONG_OPTARG}" ;; # -g # Destination grid-file
+ grd_sng=?* | grid_generation=?* | grid_gen=?* | grid_string=?* ) grd_sng="${LONG_OPTARG}" ;; # -G # Grid generation string
+ drc_in=?* | in_drc=?* | dir_in=?* | in_dir=?* | input=?* ) drc_in="${LONG_OPTARG}" ;; # -I # Input directory
+ in_fl=?* | in_file=?* | input_file=?* ) in_fl="${LONG_OPTARG}" ;; # -i # Input file
+ job_nbr=?* | job_number=?* | jobs=?* ) job_usr="${LONG_OPTARG}" ;; # -j # Job simultaneity
+ mlt_map | multimap | no_multimap | nomultimap ) mlt_map_flg='No' ;; # -M # Multi-map flag
+ mlt_map=?* | multimap=?* | no_multimap=?* | nomultimap=?* ) echo "No argument allowed for --${OPTARG%%=*} switch" >&2; exit 1 ;; # -M # Multi-map flag
+ map_fl=?* | map=?* | map_file=?* | rgr_map=?* | regrid_map=?* ) map_fl="${LONG_OPTARG}" ;; # -m # Map-file
+ nco_opt=?* | nco=?* | nco_options=?* ) nco_usr="${LONG_OPTARG}" ;; # -n # NCO options
+ drc_out=?* | out_drc=?* | dir_out=?* | out_dir=?* | output=?* ) drc_usr="${LONG_OPTARG}" ;; # -O # Output directory
+ out_fl=?* | output_file=?* | out_file=?* ) out_fl="${LONG_OPTARG}" ;; # -o # Output file
+ pdq_typ=?* | prm_typ=?* | permutation=?* | permute=?* ) pdq_typ="${LONG_OPTARG}" ;; # -P # Permutation type
+ par_typ=?* | par_md=?* | parallel_type=?* | parallel_mode=?* | parallel=?* ) par_typ="${LONG_OPTARG}" ;; # -p # Parallelism type
+ rgr_opt=?* | regrid_options=?* ) rgr_opt="${LONG_OPTARG}" ;; # -R # Regridding options
+ grd_src=?* | grid_source=?* | source_grid=?* | src_grd=?* ) grd_src="${LONG_OPTARG}" ;; # -s # Source grid-file
+ tps_opt=?* | tempest_opt=?* | tempest=?* | tempest_options=?* ) tps_opt="${LONG_OPTARG}" ;; # -T # Tempest options
+ thr_nbr=?* | thread_number=?* | thread=?* | threads=?* ) thr_usr="${LONG_OPTARG}" ;; # -t # Thread number
+ drc_tmp=?* | tmp_drc=?* | dir_tmp=?* | tmp_dir=?* | tmp=?* ) tmp_usr="${LONG_OPTARG}" ;; # -U # Temporary directory
+ unq_sfx=?* | unique_suffix=?* | suffix=?* ) unq_usr="${LONG_OPTARG}" ;; # -u # Unique suffix
+ var_rgr=?* | rgr_var=?* | var_cf=?* | cf_var=?* | cf_variable=?* ) var_rgr="${LONG_OPTARG}" ;; # -V # CF template variable
+ var_lst=?* | variable_list=?* | variable=?* | variables=?* ) var_lst="${LONG_OPTARG}" ;; # -v # Variables
+ wgt_gnr=?* | weight_generator=?* | generator=?* ) wgt_usr="${LONG_OPTARG}" ;; # -w # Weight-generator
+ xtn_var=?* | extensive=?* | var_xtn=?* | extensive_variables=?* ) xtn_var="${LONG_OPTARG}" ;; # -x # Extensive variables
+ '' ) break ;; # "--" terminates argument processing
+ * ) printf "\nERROR: Illegal option ${fnt_bld}--${OPTARG}${fnt_nrm}" >&2; fnc_usg_prn ;;
+ esac ;;
\?) # Unrecognized option
- printf "\nERROR: Option ${fnt_bld}-$OPTARG${fnt_nrm} not allowed"
+ printf "\nERROR: Option ${fnt_bld}-${OPTARG}${fnt_nrm} not allowed" >&2
fnc_usg_prn ;;
esac
done
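
The option string's trailing "-:" is what lets plain getopts accept GNU-style long options: a token such as --input_file=in.nc is returned as option '-' with the whole "key=value" text in OPTARG, and the inner case statement then dispatches on the key. A minimal self-contained sketch of the idiom with two illustrative options:

  while getopts :i:o:-: OPT; do
      case ${OPT} in
          i) in_fl="${OPTARG}" ;;
          o) out_fl="${OPTARG}" ;;
          -) LONG_OPTARG="${OPTARG#*=}"
             case ${OPTARG} in
                 in_fl=?* | input_file=?* ) in_fl="${LONG_OPTARG}" ;;
                 out_fl=?* | output_file=?* ) out_fl="${LONG_OPTARG}" ;;
                 * ) echo "ERROR: Illegal option --${OPTARG}" >&2; exit 1 ;;
             esac ;;
          \?) echo "ERROR: Option -${OPTARG} not allowed" >&2; exit 1 ;;
      esac
  done # !getopts
  echo "in_fl=${in_fl} out_fl=${out_fl}"

Both spellings then behave identically, e.g., "script -i in.nc -o out.nc" and "script --input_file=in.nc --output_file=out.nc".
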
@@ -765,7 +811,7 @@ if [ -f 'PET0.RegridWeightGen.Log' ]; then
fi # !PETO
printf "Started processing at `date`.\n"
printf "Running remap script ${spt_nm} from directory ${drc_spt}\n"
-printf "NCO version ${nco_vrs} from directory ${drc_nco}\n"
+printf "NCO binaries version ${nco_vrs} from directory ${drc_nco}\n"
printf "Input files in or relative to directory ${drc_in}\n"
printf "Intermediate/temporary files written to directory ${drc_tmp}\n"
printf "Output files to directory ${drc_out}\n"
diff --git a/data/ncclimo b/data/ncsplit
similarity index 76%
copy from data/ncclimo
copy to data/ncsplit
index 617935d..1cc60e7 100755
--- a/data/ncclimo
+++ b/data/ncsplit
@@ -1,7 +1,6 @@
#!/bin/bash
-# Purpose: Climatology script tailored to CESM'ish monthly input and ACME output guidelines
-# Produces and optionally regrids climatological monthly means, seasonal means, annual mean
+# Purpose: Split CESM'ish and ACME'ish raw history tape files into (possibly regridded) single variable timeseries
# Copyright (C) 2015-2016 Charlie Zender
# This file is part of NCO, the netCDF Operators. NCO is free software.
@@ -30,20 +29,20 @@
# Prerequisites: Bash, NCO
# Script could use other shells, e.g., dash (Debian default) after rewriting function definition and looping constructs
-# Source: https://github.com/nco/nco/tree/master/data/ncclimo
-# Documentation: http://nco.sf.net/nco.html#ncclimo
+# Source: https://github.com/nco/nco/tree/master/data/ncsplit
+# Documentation: http://nco.sf.net/nco.html#ncsplit
# Additional Documentation:
# HowTo: https://acme-climate.atlassian.net/wiki/display/ATM/Generating+Climo+files
-# ACME Climatology Requirements: https://acme-climate.atlassian.net/wiki/display/ATM/Climo+Files+-+v0.3+AMIP+runs
+# ACME Timeseries Requirements: https://acme-climate.atlassian.net/wiki/display/ATM/Climo+Files+-+v0.3+AMIP+runs
# Direct install:
-# scp ~/nco/data/ncclimo aims4.llnl.gov:bin
-# scp ~/nco/data/ncclimo cooley.alcf.anl.gov:bin
-# scp ~/nco/data/ncclimo cori.nersc.gov:bin_cori
-# scp ~/nco/data/ncclimo edison.nersc.gov:bin_edison
-# scp ~/nco/data/ncclimo rhea.ccs.ornl.gov:bin_rhea
-# scp ~/nco/data/ncclimo yellowstone.ucar.edu:bin
-# scp dust.ess.uci.edu:nco/data/ncclimo ~/bin
+# scp ~/nco/data/ncsplit aims4.llnl.gov:bin
+# scp ~/nco/data/ncsplit cooley.alcf.anl.gov:bin
+# scp ~/nco/data/ncsplit cori.nersc.gov:bin_cori
+# scp ~/nco/data/ncsplit edison.nersc.gov:bin_edison
+# scp ~/nco/data/ncsplit rhea.ccs.ornl.gov:bin_rhea
+# scp ~/nco/data/ncsplit yellowstone.ucar.edu:bin
+# scp dust.ess.uci.edu:nco/data/ncsplit ~/bin
# Set script name, directory, PID, run directory
drc_pwd=${PWD}
@@ -155,29 +154,31 @@ case "${HOSTNAME}" in
esac # !HOSTNAME
# Production usage:
-# ncclimo -c famipc5_ne30_v0.3_00003 -s 1980 -e 1983 -i /lustre/atlas1/cli115/world-shared/mbranst/famipc5_ne30_v0.3_00003-wget-test -o ${DATA}/ne30/clm
-# ncclimo -c famipc5_ne120_v0.3_00003 -s 1980 -e 1983 -i /lustre/atlas1/cli115/world-shared/mbranst/famipc5_ne120_v0.3_00003-wget-test -o ${DATA}/ne120/clm
-# ncclimo -c B1850C5e1_ne30 -s 2 -e 199 -i /lustre/atlas1/cli115/world-shared/mbranst/B1850C5e1_ne30/atm/hist -o ${DATA}/ne30/clm
+# ncsplit -c famipc5_ne30_v0.3_00003 -s 1980 -e 1983 -i /lustre/atlas1/cli115/world-shared/mbranst/famipc5_ne30_v0.3_00003-wget-test -o ${DATA}/ne30/clm
+# ncsplit -c famipc5_ne120_v0.3_00003 -s 1980 -e 1983 -i /lustre/atlas1/cli115/world-shared/mbranst/famipc5_ne120_v0.3_00003-wget-test -o ${DATA}/ne120/clm
+# ncsplit -c B1850C5e1_ne30 -s 2 -e 199 -i /lustre/atlas1/cli115/world-shared/mbranst/B1850C5e1_ne30/atm/hist -o ${DATA}/ne30/clm
# Incremental climo testing:
-# ncclimo -v FSNT,AODVIS -c famipc5_ne30_v0.3_00003 -s 1980 -e 1981 -i ${DATA}/ne30/raw -o ${DATA}/ne30/prv -r ${DATA}/maps/map_ne30np4_to_fv129x256_aave.20150901.nc
-# ncclimo -v FSNT,AODVIS -c famipc5_ne30_v0.3_00003 -s 1982 -e 1983 -i ${DATA}/ne30/raw -o ${DATA}/ne30/clm -r ${DATA}/maps/map_ne30np4_to_fv129x256_aave.20150901.nc -x ${DATA}/ne30/prv -X ${DATA}/ne30/xtn -S 1980
+# ncsplit -v FSNT,AODVIS -c famipc5_ne30_v0.3_00003 -s 1980 -e 1981 -i ${DATA}/ne30/raw -o ${DATA}/ne30/prv -r ${DATA}/maps/map_ne30np4_to_fv129x256_aave.20150901.nc
+# ncsplit -v FSNT,AODVIS -c famipc5_ne30_v0.3_00003 -s 1982 -e 1983 -i ${DATA}/ne30/raw -o ${DATA}/ne30/clm -r ${DATA}/maps/map_ne30np4_to_fv129x256_aave.20150901.nc -x ${DATA}/ne30/prv -X ${DATA}/ne30/xtn -S 1980
+# Binary climo testing:
+# ncsplit -v FSNT,AODVIS -c famipc5_ne30_v0.3_00003 -S 1980 -E 1981 -x ${DATA}/ne30/prv -s 1982 -e 1983 -i ${DATA}/ne30/clm -X ${DATA}/ne30/xtn
# Debugging and Benchmarking:
-# ncclimo -v FSNT,AODVIS -c famipc5_ne30_v0.3_00003 -s 1980 -e 1983 -i ${DATA}/ne30/raw -o ${DATA}/ne30/clm -r ${DATA}/maps/map_ne30np4_to_fv129x256_aave.20150901.nc
-# ncclimo -v TOTEXTTAU -c merra2_198001.nc4 -s 1980 -e 2015 -a sdd -i ${DATA}/merra2/raw -o ${DATA}/merra2/clm
-# ncclimo > ~/ncclimo.out 2>&1 &
-# ncclimo -c B1850C5e1_ne30 -s 2 -e 199 > ~/ncclimo.out 2>&1 &
-# ncclimo -c ne30_gx1.B1850c5d -s 6 -e 7 > ~/ncclimo.out 2>&1 &
-# ncclimo -d 2 -v FSNT -m cam2 -c essgcm14 -s 1 -e 20 -i ${DATA}/essgcm14 -o ${DATA}/anl > ~/ncclimo.out 2>&1 &
-# ncclimo -c famipc5_ne30_v0.3_00003 -s 1980 -e 1983 -i /lustre/atlas1/cli115/world-shared/mbranst/famipc5_ne30_v0.3_00003-wget-test -o ${DATA}/ne30/clm > ~/ncclimo.out 2>&1 &
-# ncclimo -c famipc5_ne120_v0.3_00003 -s 1980 -e 1983 -i /lustre/atlas1/cli115/world-shared/mbranst/famipc5_ne120_v0.3_00003-wget-test -o ${DATA}/ne120/clm > ~/ncclimo.out 2>&1 &
-# MPAS: Prior to running ncclimo on MPAS output, annotate missing values of input with, e.g.,
+# ncsplit -v FSNT,AODVIS -c famipc5_ne30_v0.3_00003 -s 1980 -e 1983 -i ${DATA}/ne30/raw -o ${DATA}/ne30/clm -r ${DATA}/maps/map_ne30np4_to_fv129x256_aave.20150901.nc
+# ncsplit -v TOTEXTTAU -c merra2_198001.nc4 -s 1980 -e 2015 -a sdd -i ${DATA}/merra2/raw -o ${DATA}/merra2/clm
+# ncsplit > ~/ncsplit.out 2>&1 &
+# ncsplit -c B1850C5e1_ne30 -s 2 -e 199 > ~/ncsplit.out 2>&1 &
+# ncsplit -c ne30_gx1.B1850c5d -s 6 -e 7 > ~/ncsplit.out 2>&1 &
+# ncsplit -d 2 -v FSNT -m cam2 -c essgcm14 -s 1 -e 20 -i ${DATA}/essgcm14 -o ${DATA}/anl > ~/ncsplit.out 2>&1 &
+# ncsplit -c famipc5_ne30_v0.3_00003 -s 1980 -e 1983 -i /lustre/atlas1/cli115/world-shared/mbranst/famipc5_ne30_v0.3_00003-wget-test -o ${DATA}/ne30/clm > ~/ncsplit.out 2>&1 &
+# ncsplit -c famipc5_ne120_v0.3_00003 -s 1980 -e 1983 -i /lustre/atlas1/cli115/world-shared/mbranst/famipc5_ne120_v0.3_00003-wget-test -o ${DATA}/ne120/clm > ~/ncsplit.out 2>&1 &
+# MPAS: Prior to running ncsplit on MPAS output, annotate missing values of input with, e.g.,
# for fl in `ls hist.*` ; do
# ncatted -O -t -a _FillValue,,o,d,-9.99999979021476795361e+33 ${fl}
# done
-# ncclimo -v temperature -c hist -s 2 -e 3 -m ocn -i /lustre/atlas1/cli112/proj-shared/golaz/ACME_simulations/20160121.A_B2000ATMMOD.ne30_oEC.titan.a00/run -r ${DATA}/maps/map_oEC60to30_to_t62_bilin.20160301.nc -o ${DATA}/mpas/clm > ~/ncclimo.out 2>&1 &
-# ncclimo -v iceAreaCell -c hist -s 2 -e 3 -m ice -i /lustre/atlas1/cli112/proj-shared/golaz/ACME_simulations/20160121.A_B2000ATMMOD.ne30_oEC.titan.a00/run -r ${DATA}/maps/map_oEC60to30_to_t62_bilin.20160301.nc -o ${DATA}/mpas/clm > ~/ncclimo.out 2>&1 &
+# ncsplit -v temperature -c hist -s 2 -e 3 -m ocn -i /lustre/atlas1/cli112/proj-shared/golaz/ACME_simulations/20160121.A_B2000ATMMOD.ne30_oEC.titan.a00/run -r ${DATA}/maps/map_oEC60to30_to_t62_bilin.20160301.nc -o ${DATA}/mpas/clm > ~/ncsplit.out 2>&1 &
+# ncsplit -v iceAreaCell -c hist -s 2 -e 3 -m ice -i /lustre/atlas1/cli112/proj-shared/golaz/ACME_simulations/20160121.A_B2000ATMMOD.ne30_oEC.titan.a00/run -r ${DATA}/maps/map_oEC60to30_to_t62_bilin.20160301.nc -o ${DATA}/mpas/clm > ~/ncsplit.out 2>&1 &
# Best performance on resolutions finer than ne30 (~1x1 degree) requires a job scheduler/batch processor
# Cobalt (cooley), SLURM (cori,edison), Torque (a PBS-variant) (hopper), and PBS (rhea) schedulers allow both interactive and non-interactive (i.e., script) batch jobs
@@ -197,29 +198,29 @@ esac # !HOSTNAME
# Cobalt: qsub -I, qsub, qstat, qdel
# PBS: qsub -I, qsub, qstat, qdel
# SLURM: salloc, sbatch, squeue, scancel
-# Interactive queue: a) Reserve nodes and acquire prompt on control node b) Execute ncclimo command interactively
-# Cooley: qsub -I -A HiRes_EarthSys --nodecount=12 --time=00:30:00 --jobname=ncclimo
-# Cori: salloc -A acme --nodes=12 --partition=debug --time=00:30:00 --job-name=ncclimo # NB: 30 minute limit, Edison too
-# Hopper: qsub -I -A acme -V -l mppwidth=288 -l walltime=00:30:00 -q debug -N ncclimo # deprecated, old Edison
-# Rhea: qsub -I -A CLI115 -V -l nodes=12 -l walltime=00:30:00 -N ncclimo # Bigmem: -l partition=gpu
+# Interactive queue: a) Reserve nodes and acquire prompt on control node b) Execute ncsplit command interactively
+# Cooley: qsub -I -A HiRes_EarthSys --nodecount=12 --time=00:30:00 --jobname=ncsplit
+# Cori: salloc -A acme --nodes=12 --partition=debug --time=00:30:00 --job-name=ncsplit # NB: 30 minute limit, Edison too
+# Hopper: qsub -I -A acme -V -l mppwidth=288 -l walltime=00:30:00 -q debug -N ncsplit # deprecated, old Edison
+# Rhea: qsub -I -A CLI115 -V -l nodes=12 -l walltime=00:30:00 -N ncsplit # Bigmem: -l partition=gpu
# Yellow: fxm # Bigmem:
-# Non-interactive batch procedure: a) Store ncclimo command in ncclimo.[cobalt|pbs|slurm] b) qsub ncclimo.[cobalt|pbs|slurm]
+# Non-interactive batch procedure: a) Store ncsplit command in ncsplit.[cobalt|pbs|slurm] b) qsub ncsplit.[cobalt|pbs|slurm]
# Non-interactive batch queue differences (besides argument syntax):
# 1. Cobalt and SLURM require initial 'shebang' line to specify the shell interpreter (not required on PBS)
# 2. Cobalt appends stdout/stderr to existing output files, if any, whereas PBS overwrites existing files
# 3. Cobalt uses ${COBALT_NODEFILE} and (NA) whereas PBS uses ${PBS_NODEFILE} and ${PBS_NUM_PPN}, respectively, and SLURM uses ${SLURM_NODELIST} and ${SLURM_CPUS_ON_NODE}, respectively
# 4. SLURM automatically combines stdout and stderr, yet does not understand tilde (~ = home directory) expansion in error/output filenames
# Differences 1 & 2 impose slightly different invocations; difference 3 requires abstracting environment variables; difference 4 requires omitting ~'s
-# Cooley a): /bin/rm -f ~/ncclimo.err ~/ncclimo.out
-# echo '#!/bin/bash' > ~/ncclimo.cobalt
-# echo "ncclimo -d 1 -p mpi -c b1850c5_m2a -s 55 -e 58 -i /home/taylorm/scratch1.qtang/b1850c5_m2a/run -o ${DATA}/ne120/clm" >> ~/ncclimo.cobalt;chmod a+x ~/ncclimo.cobalt
-# Cori,Edison a): echo '#!/bin/bash' > ~/ncclimo.slurm
-# echo "ncclimo -a scd -d 1 -p mpi -c famipc5_ne30_v0.3_00003 -s 1980 -e 1983 -i ${DATA}/ne30/raw -o ${DATA}/ne30/clm -r ${DATA}/maps/map_ne30np4_to_fv129x256_aave.20150901.nc" >> ~/ncclimo.slurm;chmod a+x ~/ncclimo.slurm
-# Rhea a): echo "ncclimo -a scd -d 1 -p mpi -c famipc5_ne120_v0.3_00003 -s 1980 -e 1983 -i /lustre/atlas1/cli115/world-shared/mbranst/famipc5_ne120_v0.3_00003-wget-test -o ${DATA}/ne120/clm -r ${DATA}/maps/map_ne120np4_to_fv257x512_aave.20150901.nc" > ~/ncclimo.pbs;chmod a+x ~/ncclimo.pbs
-# Cooley b): qsub -A HiRes_EarthSys --nodecount=12 --time=00:30:00 --jobname ncclimo --error ~/ncclimo.err --output ~/ncclimo.out --notify zender at uci.edu ~/ncclimo.cobalt
-# Cori,Edison b): sbatch -A acme --nodes=12 --time=00:30:00 --partition=regular --job-name=ncclimo --mail-type=END --output=ncclimo.out ~/ncclimo.slurm
-# Hopper b): qsub -A acme -V -l mppwidth=288 -l walltime=00:30:00 -q regular -N ncclimo -j oe -m e -o ~/ncclimo.out ~/ncclimo.pbs
-# Rhea b): qsub -A CLI115 -V -l nodes=12 -l walltime=00:30:00 -N ncclimo -j oe -m e -o ~/ncclimo.out ~/ncclimo.pbs
+# Cooley a): /bin/rm -f ~/ncsplit.err ~/ncsplit.out
+# echo '#!/bin/bash' > ~/ncsplit.cobalt
+# echo "ncsplit -d 1 -p mpi -c b1850c5_m2a -s 55 -e 58 -i /home/taylorm/scratch1.qtang/b1850c5_m2a/run -o ${DATA}/ne120/clm" >> ~/ncsplit.cobalt;chmod a+x ~/ncsplit.cobalt
+# Cori,Edison a): echo '#!/bin/bash' > ~/ncsplit.slurm
+# echo "ncsplit -a scd -d 1 -p mpi -c famipc5_ne30_v0.3_00003 -s 1980 -e 1983 -i ${DATA}/ne30/raw -o ${DATA}/ne30/clm -r ${DATA}/maps/map_ne30np4_to_fv129x256_aave.20150901.nc" >> ~/ncsplit.slurm;chmod a+x ~/ncsplit.slurm
+# Rhea a): echo "ncsplit -a scd -d 1 -p mpi -c famipc5_ne120_v0.3_00003 -s 1980 -e 1983 -i /lustre/atlas1/cli115/world-shared/mbranst/famipc5_ne120_v0.3_00003-wget-test -o ${DATA}/ne120/clm -r ${DATA}/maps/map_ne120np4_to_fv257x512_aave.20150901.nc" > ~/ncsplit.pbs;chmod a+x ~/ncsplit.pbs
+# Cooley b): qsub -A HiRes_EarthSys --nodecount=12 --time=00:30:00 --jobname ncsplit --error ~/ncsplit.err --output ~/ncsplit.out --notify zender at uci.edu ~/ncsplit.cobalt
+# Cori,Edison b): sbatch -A acme --nodes=12 --time=00:30:00 --partition=regular --job-name=ncsplit --mail-type=END --output=ncsplit.out ~/ncsplit.slurm
+# Hopper b): qsub -A acme -V -l mppwidth=288 -l walltime=00:30:00 -q regular -N ncsplit -j oe -m e -o ~/ncsplit.out ~/ncsplit.pbs
+# Rhea b): qsub -A CLI115 -V -l nodes=12 -l walltime=00:30:00 -N ncsplit -j oe -m e -o ~/ncsplit.out ~/ncsplit.pbs
# Normal use: Set five "mandatory" inputs (caseid, yr_srt, yr_end, drc_in, drc_out), and possibly rgr_map, on command line
# caseid: Simulation name (filenames must start with ${caseid})
@@ -229,7 +230,7 @@ esac # !HOSTNAME
# SCD mode ignores Jan-Nov of yr_srt
# Dec of yr_end is excluded from the seasonal and monthly analysis in SCD mode
# yr_end should, and for SDD mode must, contain complete year of output
-# drc_out: Output directory for processed native grid climatology ("climo files")
+# drc_out: Output directory for processed native grid timeseries ("climo files")
# User needs write permission for ${drc_out}
# rgr_map: Regridding map, if non-NULL, invoke regridder with specified map on output datasets
# Pass options intended exclusively for the NCO regridder as arguments to the -R switch
@@ -237,7 +238,7 @@ esac # !HOSTNAME
# yr_end: Year of last January to analyze
# Other options (often their default settings work well):
-# clm_md: Climatology mode, i.e., how to treat December. One of two options:
+# clm_md: Timeseries mode, i.e., how to treat December. One of two options:
# Seasonally-contiguous-december (SCD) mode (clm_md=scd) (default)
# Seasonally-discontiguous-december (SDD) mode (clm_md=sdd)
# Both modes use an integral multiple of 12 months, and _never alter any input files_
@@ -281,7 +282,7 @@ esac # !HOSTNAME
# '-D 2 -7 -L 1' for NCO debugging level 2, netCDF4-classic output, compression level 1
# '--no_tmp_fl -x' to skip temporary files, turn extraction into exclusion list
# rgr_opt: String of options (besides thread-number) to pass-through exclusively to NCO regridder, e.g.,
-# ncclimo -m clm2 ... -R col_nm=lndgrid -r map.nc ...
+# ncsplit -m clm2 ... -R col_nm=lndgrid -r map.nc ...
# thr_nbr: Thread number to use in NCO regridder, '-t 1' for one thread, '-t 2' for two threads...
# Set NCO version and directory
@@ -290,7 +291,7 @@ if [ -z "${nco_exe}" ]; then
echo "ERROR: Unable to find NCO, nco_exe = ${nco_exe}"
exit 1
fi # !nco_exe
-# Use stackoverflow method to find NCO directory
+# Use StackOverflow method to find NCO directory
while [ -h "${nco_exe}" ]; do
drc_nco="$( cd -P "$( dirname "${nco_exe}" )" && pwd )"
nco_exe="$(readlink "${nco_exe}")"
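
The while loop above (shown only partially in this hunk) is the stock readlink idiom for chasing a chain of symlinks back to the real NCO installation directory. A minimal sketch of the complete pattern, assuming readlink is available and that relative link targets are resolved against the directory holding the link:

  exe=`command -v ncks`
  while [ -h "${exe}" ]; do # Repeat until ${exe} is no longer a symlink
      drc="$( cd -P "$( dirname "${exe}" )" && pwd )"
      exe="$(readlink "${exe}")"
      [[ ${exe} != /* ]] && exe="${drc}/${exe}" # Relative target: resolve against link's directory
  done # !while
  drc_nco="$( cd -P "$( dirname "${exe}" )" && pwd )"
  echo "NCO binaries reside in ${drc_nco}"

The same construct appears near the top of ncremap to locate the script itself via ${BASH_SOURCE[0]}.
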
@@ -310,10 +311,11 @@ fi # !TERM
# Defaults for command-line options and some derived variables
# Modify these defaults to save typing later
bnd_nm='nbnd' # [sng] Bounds dimension name (e.g., 'nbnd', 'tbnd')
-clm_md='scd' # [sng] Climatology mode ('scd' or 'sdd' as per above)
+bnr_flg='No' # [sng] Binary method
+clm_md='scd' # [sng] Timeseries mode ('scd' or 'sdd' as per above)
caseid='' # [sng] Case ID
caseid_xmp='famipc5_ne30_v0.3_00003' # [sng] Case ID for examples
-cf_flg='Yes' # [sng] Produce CF climatology attribute?
+cf_flg='Yes' # [sng] Produce CF timeseries attribute?
lnk_flg='Yes' # [sng] Link ACME-climo to AMWG-climo filenames
dbg_lvl=0 # [nbr] Debugging level
drc_in='' # [sng] Input file directory
@@ -336,6 +338,7 @@ mdl_nm='cam' # [sng] Model name (e.g., 'cam', 'cam2', 'cice', 'cism', 'clm', 'cl
mdl_typ='cesm' # [sng] Model type ('cesm', 'mpas') (for filenames and regridding)
mpi_flg='No' # [sng] Parallelize over nodes
nco_opt='--no_tmp_fl' # [sng] NCO options (e.g., '-7 -D 1 -L 1')
+ncr_flg='No' # [sng] Incremental method
nd_nbr=1 # [nbr] Number of nodes
par_opt='' # [sng] Parallel options to shell
par_typ='bck' # [sng] Parallelism type
@@ -348,7 +351,7 @@ rgr_opt='' # [sng] Regridding options (e.g., '--rgr col_nm=lndgrid', '--rgr col_
thr_nbr=2 # [nbr] Thread number for regridder
#var_lst='FSNT,AODVIS' # [sng] Variables to process (empty means all)
var_lst='' # [sng] Variables to process (empty means all)
-xtn_flg='No' # [sng] Extend previous climatology with current data
+xtn_flg='No' # [sng] Produce extended climatology
yr_end='1983' # [yr] End year
yr_srt='1980' # [yr] Start year
@@ -361,6 +364,7 @@ function fnc_usg_prn { # NB: dash supports fnc_nm (){} syntax, not function fnc_
echo "${fnt_rvr}-b${fnt_nrm} ${fnt_bld}bnd_nm${fnt_nrm} Bounds dimension name (default ${fnt_bld}${bnd_nm}${fnt_nrm})"
echo "${fnt_rvr}-c${fnt_nrm} ${fnt_bld}caseid${fnt_nrm} Case ID string (default ${fnt_bld}${caseid}${fnt_nrm})"
echo "${fnt_rvr}-d${fnt_nrm} ${fnt_bld}dbg_lvl${fnt_nrm} Debug level (default ${fnt_bld}${dbg_lvl}${fnt_nrm})"
+ echo "${fnt_rvr}-E${fnt_nrm} ${fnt_bld}yr_prv${fnt_nrm} End year previous climo (empty means none) (default ${fnt_bld}${yr_end_prv}${fnt_nrm})"
echo "${fnt_rvr}-e${fnt_nrm} ${fnt_bld}yr_end${fnt_nrm} End year (default ${fnt_bld}${yr_end}${fnt_nrm})"
echo "${fnt_rvr}-f${fnt_nrm} ${fnt_bld}fml_nm${fnt_nrm} Family name (nickname) (empty means none) (default ${fnt_bld}${fml_nm}${fnt_nrm})"
echo "${fnt_rvr}-h${fnt_nrm} ${fnt_bld}hst_nm${fnt_nrm} History volume name (default ${fnt_bld}${hst_nm}${fnt_nrm})"
@@ -377,7 +381,7 @@ function fnc_usg_prn { # NB: dash supports fnc_nm (){} syntax, not function fnc_
echo "${fnt_rvr}-s${fnt_nrm} ${fnt_bld}yr_srt${fnt_nrm} Start year (default ${fnt_bld}${yr_srt}${fnt_nrm})"
echo "${fnt_rvr}-S${fnt_nrm} ${fnt_bld}yr_prv${fnt_nrm} Start year previous climo (empty means none) (default ${fnt_bld}${yr_srt_prv}${fnt_nrm})"
echo "${fnt_rvr}-v${fnt_nrm} ${fnt_bld}var_lst${fnt_nrm} Variable list (empty means all) (default ${fnt_bld}${var_lst}${fnt_nrm})"
- echo "${fnt_rvr}-X${fnt_nrm} ${fnt_bld}drc_xtn${fnt_nrm} Extended climo directory (default ${fnt_bld}${drc_xtn}${fnt_nrm})"
+ echo "${fnt_rvr}-X${fnt_nrm} ${fnt_bld}drc_xtn${fnt_nrm} Extended climo directory (default ${fnt_bld}${drc_xtn}${fnt_nrm})"
echo "${fnt_rvr}-x${fnt_nrm} ${fnt_bld}drc_prv${fnt_nrm} Previous climo directory (default ${fnt_bld}${drc_prv}${fnt_nrm})"
echo "${fnt_rvr}-Y${fnt_nrm} ${fnt_bld}rgr_xtn${fnt_nrm} Regridded extended climo directory (default ${fnt_bld}${drc_rgr_xtn}${fnt_nrm})"
echo "${fnt_rvr}-y${fnt_nrm} ${fnt_bld}rgr_prv${fnt_nrm} Regridded previous climo directory (default ${fnt_bld}${drc_rgr_prv}${fnt_nrm})"
@@ -388,13 +392,13 @@ function fnc_usg_prn { # NB: dash supports fnc_nm (){} syntax, not function fnc_
printf " ${fnt_bld}$spt_nm -c hist -m ice -s ${yr_srt} -e ${yr_end} -i ${drc_in_mps} -o ${drc_out_mps} -r ~zender/data/maps/map_oEC60to30_to_t62_bilin.20160301.nc ${fnt_nrm}\n"
printf " ${fnt_bld}$spt_nm -c hist -m ocn -p mpi -s 1 -e 5 -i ${drc_in_mps} -o ${drc_out_mps} -r ~zender/data/maps/map_oEC60to30_to_t62_bilin.20160301.nc ${fnt_nrm}\n\n"
printf "Interactive batch queues on ...\n"
- printf "cooley: qsub -I -A HiRes_EarthSys --nodecount=1 --time=00:30:00 --jobname=ncclimo\n"
- printf "cori : salloc -A acme --nodes=1 --time=00:30:00 --partition=debug --job-name=ncclimo\n"
- printf "edison: salloc -A acme --nodes=1 --time=00:30:00 --partition=debug --job-name=ncclimo\n"
- printf "rhea : qsub -I -A CLI115 -V -l nodes=1 -l walltime=00:30:00 -N ncclimo\n"
- printf "rhea : qsub -I -A CLI115 -V -l nodes=1 -l walltime=00:30:00 -lpartition=gpu -N ncclimo # Bigmem\n\n"
-# echo "3-yrs ne30: ncclimo -c famipc5_ne30_v0.3_00003 -s 1980 -e 1982 -i /lustre/atlas1/cli115/world-shared/mbranst/famipc5_ne30_v0.3_00003-wget-test -o ${DATA}/ne30/clm -r ~zender/data/maps/map_ne30np4_to_fv129x256_aave.20150901.nc > ~/ncclimo.out 2>&1 &"
-# printf "3-yrs ne120: ncclimo -p mpi -c famipc5_ne120_v0.3_00003 -s 1980 -e 1982 -i /lustre/atlas1/cli115/world-shared/mbranst/famipc5_ne120_v0.3_00003-wget-test -o ${DATA}/ne120/clm -r ~zender/data/maps/map_ne120np4_to_fv257x512_aave.20150901.nc > ~/ncclimo.out 2>&1 &\n\n"
+ printf "cooley: qsub -I -A HiRes_EarthSys --nodecount=1 --time=00:30:00 --jobname=ncsplit\n"
+ printf "cori : salloc -A acme --nodes=1 --time=00:30:00 --partition=debug --job-name=ncsplit\n"
+ printf "edison: salloc -A acme --nodes=1 --time=00:30:00 --partition=debug --job-name=ncsplit\n"
+ printf "rhea : qsub -I -A CLI115 -V -l nodes=1 -l walltime=00:30:00 -N ncsplit\n"
+ printf "rhea : qsub -I -A CLI115 -V -l nodes=1 -l walltime=00:30:00 -lpartition=gpu -N ncsplit # Bigmem\n\n"
+# echo "3-yrs ne30: ncsplit -c famipc5_ne30_v0.3_00003 -s 1980 -e 1982 -i /lustre/atlas1/cli115/world-shared/mbranst/famipc5_ne30_v0.3_00003-wget-test -o ${DATA}/ne30/clm -r ~zender/data/maps/map_ne30np4_to_fv129x256_aave.20150901.nc > ~/ncsplit.out 2>&1 &"
+# printf "3-yrs ne120: ncsplit -p mpi -c famipc5_ne120_v0.3_00003 -s 1980 -e 1982 -i /lustre/atlas1/cli115/world-shared/mbranst/famipc5_ne120_v0.3_00003-wget-test -o ${DATA}/ne120/clm -r ~zender/data/maps/map_ne120np4_to_fv257x512_aave.20150901.nc > ~/ncsplit.out 2>&1 &\n\n"
exit 1
} # end fnc_usg_prn()
@@ -443,12 +447,13 @@ fi # !arg_nbr
# http://stackoverflow.com/questions/402377/using-getopts-in-bash-shell-script-to-get-long-and-short-command-line-options
# http://tuxtweaks.com/2014/05/bash-getopts
cmd_ln="${spt_nm} ${@}"
-while getopts :a:b:c:d:e:f:h:i:l:m:n:O:o:p:R:r:S:s:t:v:X:x:Y:y: OPT; do
+while getopts :a:b:c:d:E:e:f:h:i:l:m:n:O:o:p:R:r:S:s:t:v:X:x:Y:y: OPT; do
case ${OPT} in
a) clm_md=${OPTARG} ;; # Climatology mode
b) bnd_nm=${OPTARG} ;; # Bounds dimension name
c) caseid=${OPTARG} ;; # CASEID
d) dbg_lvl=${OPTARG} ;; # Debugging level
+ E) yr_end_prv=${OPTARG} ;; # End year previous
e) yr_end=${OPTARG} ;; # End year
f) fml_nm=${OPTARG} ;; # Family name
h) hst_nm=${OPTARG} ;; # History tape name
@@ -520,6 +525,17 @@ yyyy_end=`printf "%04d" ${yr_end_rth}`
let yr_endm1=${yr_end_rth}-1
let yr_nbr=${yr_end_rth}-${yr_srt_rth}+1
+if [ -n "${yr_srt_prv}" ]; then
+ # Specifying only yr_srt_prv implies incremental method
+ # Specifying both yr_srt_prv and yr_end_prv implies binary method
+ xtn_flg='Yes'
+ if [ -n "${yr_end_prv}" ]; then
+ bnr_flg='Yes'
+ else # !yr_end_prv binary method
+ ncr_flg='Yes'
+ fi # !yr_end_prv binary method
+fi # !yr_srt_prv extended climo
+
# Derived variables
out_nm=${caseid}
if [ "${caseid}" = 'hist' ]; then
@@ -611,7 +627,7 @@ if [ "${mpi_flg}" = 'Yes' ]; then
# SLURM returns compressed lists (e.g., "nid00[076-078,559-567]")
# Convert this to file with uncompressed list (like Cobalt, PBS)
# http://www.ceci-hpc.be/slurm_faq.html#Q12
- nd_fl='ncclimo.slurm_nodelist'
+ nd_fl='ncsplit.slurm_nodelist'
nd_lst=`scontrol show hostname ${SLURM_NODELIST}`
echo ${nd_lst} > ${nd_fl}
else
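
scontrol is needed above because SLURM advertises the allocation as a compressed range (e.g., "nid00[076-078,559-567]") rather than the one-hostname-per-line nodefile that Cobalt and PBS provide. A minimal sketch of the expansion, assuming it runs inside a SLURM allocation so that SLURM_NODELIST is set:

  nd_fl='ncsplit.slurm_nodelist' # [sng] File with uncompressed node list
  nd_lst=`scontrol show hostname ${SLURM_NODELIST}` # One expanded hostname per line
  echo "${nd_lst}" > ${nd_fl} # Quoting preserves the one-per-line layout
  wc -l < ${nd_fl} # Number of allocated nodes

Note that the script writes the list unquoted, which collapses it onto a single space-separated line; either layout identifies the same nodes.
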
@@ -665,6 +681,7 @@ fi # !mpi
# Print initial state
if [ ${dbg_lvl} -ge 1 ]; then
printf "dbg: bnd_nm = ${bnd_nm}\n"
+ printf "dbg: bnr_flg = ${bnr_flg}\n"
printf "dbg: caseid = ${caseid}\n"
printf "dbg: cf_flg = ${cf_flg}\n"
printf "dbg: clm_md = ${clm_md}\n"
@@ -685,6 +702,7 @@ if [ ${dbg_lvl} -ge 1 ]; then
printf "dbg: mdl_nm = ${mdl_nm}\n"
printf "dbg: mpi_flg = ${mpi_flg}\n"
printf "dbg: nco_opt = ${nco_opt}\n"
+ printf "dbg: ncr_flg = ${ncr_flg}\n"
printf "dbg: nd_nbr = ${nd_nbr}\n"
printf "dbg: par_typ = ${par_typ}\n"
printf "dbg: rgr_map = ${rgr_map}\n"
@@ -714,40 +732,51 @@ mkdir -p ${drc_rgr}
# Human-readable summary
date_srt=$(date +"%s")
if [ ${dbg_lvl} -ge 0 ]; then
- printf "Climatology generation invoked with command:\n"
+ printf "Timeseries generation invoked with command:\n"
echo "${cmd_ln}"
fi # !dbg
-printf "Started climatology generation for dataset ${caseid} at `date`.\n"
-printf "Running climatology script ${spt_nm} from directory ${drc_spt}\n"
-printf "NCO version ${nco_vrs} from directory ${drc_nco}\n"
-printf "Input files in directory ${drc_in}\n"
+printf "Started timeseries generation for dataset ${caseid} at `date`.\n"
+printf "Running timeseries script ${spt_nm} from directory ${drc_spt}\n"
+printf "NCO binaries version ${nco_vrs} from directory ${drc_nco}\n"
+if [ "${xtn_flg}" = 'No' ]; then
+ printf "Producing standard timeseries from raw monthly input files in directory ${drc_in}\n"
+ printf "Output files to directory ${drc_out}\n"
+fi # !xtn_flg
+if [ "${bnr_flg}" = 'Yes' ]; then
+ printf "Producing extended timeseries in binary mode: Will combine pre-computed timeseries in directory ${drc_prv} with pre-computed timeseries in directory ${drc_in}\n"
+ printf "Output files to directory ${drc_xtn}\n"
+fi # !bnr_flg
+if [ "${ncr_flg}" = 'Yes' ]; then
+ printf "Producing extended timeseries in incremental mode: Pre-computed timeseries in directory ${drc_prv} will be incremented by raw monthly input files in directory ${drc_in}\n"
+ printf "Output files to directory ${drc_xtn}\n"
+fi # !ncr_flg
#printf "Intermediate/temporary files written to directory ${drc_tmp}\n"
-printf "Output files to directory ${drc_out}\n"
+if [ "${bnr_flg}" = 'No' ]; then
+ printf "Timeseries from ${yr_nbr} years of contiguous raw monthly data touching ${yr_cln} calendar years from YYYYMM = ${yyyy_clm_srt_dec}${mm_ann_srt} to ${yyyy_end}${mm_ann_end}.\n"
+fi # !bnr_flg
if [ "${mdl_typ}" = 'yyyymm' ]; then
printf "Filenames will be constructed with generic conventions as ${bs_nm}_YYYYMM.${bs_sfx}\n"
else # !mdl_typ
printf "Filenames will be constructed with CESM'ish or ACME'ish conventions.\n"
fi # !mdl_typ
-printf "Climatology from ${yr_nbr} years of contiguous data crossing ${yr_cln} calendar years from YYYYMM = ${yyyy_clm_srt_dec}${mm_ann_srt} to ${yyyy_end}${mm_ann_end}.\n"
if [ ${clm_md} = 'scd' ]; then
printf "Winter statistics based on seasonally contiguous December (scd-mode): DJF sequences are consecutive months that cross calendar-year boundaries.\n"
else
printf "Winter statistics based on seasonally discontiguous December (sdd-mode): DJF sequences comprise three months from the same calendar year.\n"
fi # !scd
if [ ${cf_flg} = 'Yes' ]; then
- printf "Annotation for CF climatology attribute and climatology_bounds variable will be performed.\n"
+ printf "Annotation for CF timeseries attribute and timeseries_bounds variable will be performed.\n"
else
- printf "Annotation for CF climatology attribute and climatology_bounds variable will not be performed.\n"
+ printf "Annotation for CF timeseries attribute and timeseries_bounds variable will not be performed.\n"
fi # !cf
if [ -n "${rgr_map}" ]; then
- printf "This climatology will be regridded.\n"
+ printf "This timeseries will be regridded.\n"
else
- printf "This climatology will not be regridded.\n"
+ printf "This timeseries will not be regridded.\n"
fi # !rgr
# Block 1: Climatological monthly means
# Block 1 Loop 1: Generate, check, and store (but do not yet execute) monthly commands
-printf "Generating climatology...\n"
clm_idx=0
for mth in {01..12}; do
let clm_idx=${clm_idx}+1
@@ -778,13 +807,19 @@ for mth in {01..12}; do
yyyy_clm_srt=${yyyy_clm_srt_dec}
yyyy_clm_end=${yyyy_clm_end_dec}
fi # !scd
- for fl_in in ${yr_fl} ; do
- if [ ! -f "${drc_in}/${fl_in}" ]; then
- echo "ERROR: Unable to find required input file ${drc_in}/${fl_in}"
- echo "HINT: All files implied to exist by the climatology bounds (start/end year/month) must be in ${drc_in} before ${spt_nm} will proceed"
- exit 1
- fi # ! -f
- done # !fl_in
+ # Check for raw monthly file existence only if file will be used
+ if [ "${bnr_flg}" = 'No' ]; then
+ for fl_in in ${yr_fl} ; do
+ if [ ! -f "${drc_in}/${fl_in}" ]; then
+ echo "ERROR: Unable to find required input file ${drc_in}/${fl_in}"
+ echo "HINT: All files implied to exist by the timeseries bounds (start/end year/month) must be in ${drc_in} before ${spt_nm} will proceed"
+ exit 1
+ fi # ! -f
+ done # !fl_in
+ else # !bnr_flg
+ # In binary mode drc_out is actually used to locate input files from timeseries B (same as output files in incremental mode)
+ drc_out="${drc_in}"
+ fi # !bnr_flg
fl_out[${clm_idx}]="${drc_out}/${out_nm}_${MM}_${yyyy_clm_srt}${MM}_${yyyy_clm_end}${MM}_climo.nc"
cmd_clm[${clm_idx}]="${cmd_mpi[${clm_idx}]} ncra --cb -O ${nco_opt} -p ${drc_in} ${yr_fl} ${fl_out[${clm_idx}]}"
done # !mth
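
The month loop above ends by fixing the output filename for each climatological month; in binary mode it also repoints drc_out at drc_in so those same filenames resolve to the pre-computed climo files being read rather than to freshly generated output. A minimal sketch of the filename pattern, with illustrative values:

  drc_out="${DATA}/ne30/clm"
  out_nm='famipc5_ne30_v0.3_00003'
  MM='03' # March
  yyyy_clm_srt='1980'
  yyyy_clm_end='1983'
  fl_out="${drc_out}/${out_nm}_${MM}_${yyyy_clm_srt}${MM}_${yyyy_clm_end}${MM}_climo.nc"
  echo ${fl_out} # .../famipc5_ne30_v0.3_00003_03_198003_198303_climo.nc
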
@@ -821,266 +856,278 @@ for ((clm_idx=1;clm_idx<=17;clm_idx++)); do
fi # !drc_rgr
fi # !rgr_map
done # !clm_idx
-
-# Block 1 Loop 2: Execute and/or echo monthly climatology commands
-for ((clm_idx=1;clm_idx<=12;clm_idx++)); do
- printf "Climatological monthly mean for month ${clm_idx} ...\n"
- if [ ${dbg_lvl} -ge 1 ]; then
- echo ${cmd_clm[${clm_idx}]}
- fi # !dbg
- if [ ${dbg_lvl} -le 1 ]; then
- if [ -z "${par_opt}" ]; then
- eval ${cmd_clm[${clm_idx}]}
- if [ $? -ne 0 ]; then
- printf "${spt_nm}: ERROR monthly climo cmd_clm[${clm_idx}] failed. Debug this:\n${cmd_clm[${clm_idx}]}\n"
- exit 1
- fi # !err
- else # !par_opt
- eval ${cmd_clm[${clm_idx}]} ${par_opt} # eval always returns 0 on backgrounded processes
- clm_pid[${clm_idx}]=$!
- # Potential alternatives to eval:
-# eval "${cmd_clm[${clm_idx}]}" # borken
-# ${cmd_clm[${clm_idx}]} # borken
-# "${cmd_clm[${clm_idx}]}" # borken
-# exec "${cmd_clm[${clm_idx}]}" # borken
-# $(${cmd_clm[${clm_idx}]}) # borken
-# $("${cmd_clm[${clm_idx}]}") # works (when & inside cmd quotes)
- fi # !par_opt
- fi # !dbg
-done # !clm_idx
-if [ -n "${par_opt}" ]; then
- for ((clm_idx=1;clm_idx<=12;clm_idx++)); do
- wait ${clm_pid[${clm_idx}]}
- if [ $? -ne 0 ]; then
- printf "${spt_nm}: ERROR monthly climo cmd_clm[${clm_idx}] failed. Debug this:\n${cmd_clm[${clm_idx}]}\n"
- exit 1
- fi # !err
- done # !clm_idx
-fi # !par_opt
-wait
-# Block 1: Loop 4: Regrid first twelve files. Load-balance by using idle nodes (nodes not used for seasonal climatologies).
-if [ -n "${rgr_map}" ]; then
- printf "Regrid monthly data...\n"
+# Many of the subsequent blocks are executed only for normal and incremental climos, not for binary climos
+if [ "${bnr_flg}" = 'No' ]; then
+
+ # Block 1 Loop 2: Execute and/or echo monthly timeseries commands
+ printf "Generating timeseries...\n"
for ((clm_idx=1;clm_idx<=12;clm_idx++)); do
- # NB: Months, seasons, files are 1-based ([1..12], [13..16], [1..17]), nodes are 0-based ([0..11])
- let nd_idx=$(((clm_idx-1+4) % nd_nbr))
- if [ ${nd_idx} -lt 4 ]; then
- let nd_idx=${nd_idx}+4
- fi # !nd
- cmd_rgr[${clm_idx}]="${cmd_mpi[${nd_idx}]} ncks -t ${thr_nbr} -O ${nco_opt} ${rgr_opt} ${fl_out[${clm_idx}]} ${fl_rgr[${clm_idx}]}"
- if [ "${mdl_typ}" = 'mpas' ]; then
- cmd_rgr[${clm_idx}]="${cmd_mpi[${nd_idx}]} ncremap -C -u .pid${spt_pid}.climo.${clm_idx}.tmp -P mpas -t ${thr_nbr} -m ${rgr_map} -i ${fl_out[${clm_idx}]} -o ${fl_rgr[${clm_idx}]}"
- fi # !mdl_typ
+ printf "Climatological monthly mean for month ${clm_idx} ...\n"
if [ ${dbg_lvl} -ge 1 ]; then
- echo ${cmd_rgr[${clm_idx}]}
+ echo ${cmd_clm[${clm_idx}]}
fi # !dbg
if [ ${dbg_lvl} -le 1 ]; then
if [ -z "${par_opt}" ]; then
- eval ${cmd_rgr[${clm_idx}]}
+ eval ${cmd_clm[${clm_idx}]}
if [ $? -ne 0 ]; then
- printf "${spt_nm}: ERROR monthly regrid cmd_rgr[${clm_idx}] failed. Debug this:\n${cmd_rgr[${clm_idx}]}\n"
+ printf "${spt_nm}: ERROR monthly climo cmd_clm[${clm_idx}] failed. Debug this:\n${cmd_clm[${clm_idx}]}\n"
exit 1
fi # !err
else # !par_opt
- eval ${cmd_rgr[${clm_idx}]} ${par_opt}
- rgr_pid[${clm_idx}]=$!
+ eval ${cmd_clm[${clm_idx}]} ${par_opt} # eval always returns 0 on backgrounded processes
+ clm_pid[${clm_idx}]=$!
+ # Potential alternatives to eval:
+ # eval "${cmd_clm[${clm_idx}]}" # borken
+ # ${cmd_clm[${clm_idx}]} # borken
+ # "${cmd_clm[${clm_idx}]}" # borken
+ # exec "${cmd_clm[${clm_idx}]}" # borken
+ # $(${cmd_clm[${clm_idx}]}) # borken
+ # $("${cmd_clm[${clm_idx}]}") # works (when & inside cmd quotes)
fi # !par_opt
fi # !dbg
- done
- # Start seasonal means first, then wait() for monthly regridding to finish
-fi # !rgr_map
-
-# Block 2: Climatological seasonal means
-# Block 2 Loop 1: Generate seasonal commands
-printf "Climatological seasonal means...\n"
-cmd_clm[13]="${cmd_mpi[13]} ncra --cb -O -w 31,30,31 ${nco_opt} ${fl_out[3]} ${fl_out[4]} ${fl_out[5]} ${fl_out[13]}"
-cmd_clm[14]="${cmd_mpi[14]} ncra --cb -O -w 30,31,31 ${nco_opt} ${fl_out[6]} ${fl_out[7]} ${fl_out[8]} ${fl_out[14]}"
-cmd_clm[15]="${cmd_mpi[15]} ncra --cb -O -w 30,31,30 ${nco_opt} ${fl_out[9]} ${fl_out[10]} ${fl_out[11]} ${fl_out[15]}"
-cmd_clm[16]="${cmd_mpi[16]} ncra --cb -O -w 31,31,28 ${nco_opt} ${fl_out[12]} ${fl_out[1]} ${fl_out[2]} ${fl_out[16]}"
-
-# Block 2 Loop 2: Execute and/or echo seasonal climatology commands
-for ((clm_idx=13;clm_idx<=16;clm_idx++)); do
- if [ ${dbg_lvl} -ge 1 ]; then
- echo ${cmd_clm[${clm_idx}]}
- fi # !dbg
- if [ ${dbg_lvl} -le 1 ]; then
- if [ -z "${par_opt}" ]; then
- eval ${cmd_clm[${clm_idx}]}
- if [ $? -ne 0 ]; then
- printf "${spt_nm}: ERROR seasonal climo cmd_clm[${clm_idx}] failed. Debug this:\n${cmd_clm[${clm_idx}]}\n"
- exit 1
- fi # !err
- else # !par_opt
- eval ${cmd_clm[${clm_idx}]} ${par_opt}
- clm_pid[${clm_idx}]=$!
- fi # !par_opt
- fi # !dbg
-done # !clm_idx
-# wait() for monthly regridding, if any, to finish
-if [ -n "${rgr_map}" ]; then
+ done # !clm_idx
if [ -n "${par_opt}" ]; then
for ((clm_idx=1;clm_idx<=12;clm_idx++)); do
- wait ${rgr_pid[${clm_idx}]}
+ wait ${clm_pid[${clm_idx}]}
if [ $? -ne 0 ]; then
- printf "${spt_nm}: ERROR monthly regrid cmd_rgr[${clm_idx}] failed. Debug this:\n${cmd_rgr[${clm_idx}]}\n"
+ printf "${spt_nm}: ERROR monthly climo cmd_clm[${clm_idx}] failed. Debug this:\n${cmd_clm[${clm_idx}]}\n"
exit 1
fi # !err
done # !clm_idx
fi # !par_opt
-fi # !rgr_map
-# wait() for seasonal climatologies to finish
-if [ -n "${par_opt}" ]; then
- for ((clm_idx=13;clm_idx<=16;clm_idx++)); do
- wait ${clm_pid[${clm_idx}]}
- if [ $? -ne 0 ]; then
- printf "${spt_nm}: ERROR seasonal climo cmd_clm[${clm_idx}] failed. Debug this:\n${cmd_clm[${clm_idx}]}\n"
- exit 1
- fi # !err
- done # !clm_idx
-fi # !par_opt
-wait
+ wait
+
+ # Block 1: Loop 4: Regrid first twelve files. Load-balance by using idle nodes (nodes not used for seasonal climatologies).
+ if [ -n "${rgr_map}" ]; then
+ printf "Regrid monthly data...\n"
+ for ((clm_idx=1;clm_idx<=12;clm_idx++)); do
+ # NB: Months, seasons, files are 1-based ([1..12], [13..16], [1..17]), nodes are 0-based ([0..11])
+ let nd_idx=$(((clm_idx-1+4) % nd_nbr))
+ if [ ${nd_idx} -lt 4 ]; then
+ let nd_idx=${nd_idx}+4
+ fi # !nd
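+ # Illustrative mapping (assuming nd_nbr=12, not computed by the script):
+ # the offset and +4 bump above send months 1-8 to nodes 4-11 and wrap months 9-12 back onto nodes 4-7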
+ cmd_rgr[${clm_idx}]="${cmd_mpi[${nd_idx}]} ncks -t ${thr_nbr} -O ${nco_opt} ${rgr_opt} ${fl_out[${clm_idx}]} ${fl_rgr[${clm_idx}]}"
+ if [ "${mdl_typ}" = 'mpas' ]; then
+ cmd_rgr[${clm_idx}]="${cmd_mpi[${nd_idx}]} ncremap -C -u .pid${spt_pid}.climo.${clm_idx}.tmp -P mpas -t ${thr_nbr} -m ${rgr_map} -i ${fl_out[${clm_idx}]} -o ${fl_rgr[${clm_idx}]}"
+ fi # !mdl_typ
+ if [ ${dbg_lvl} -ge 1 ]; then
+ echo ${cmd_rgr[${clm_idx}]}
+ fi # !dbg
+ if [ ${dbg_lvl} -le 1 ]; then
+ if [ -z "${par_opt}" ]; then
+ eval ${cmd_rgr[${clm_idx}]}
+ if [ $? -ne 0 ]; then
+ printf "${spt_nm}: ERROR monthly regrid cmd_rgr[${clm_idx}] failed. Debug this:\n${cmd_rgr[${clm_idx}]}\n"
+ exit 1
+ fi # !err
+ else # !par_opt
+ eval ${cmd_rgr[${clm_idx}]} ${par_opt}
+ rgr_pid[${clm_idx}]=$!
+ fi # !par_opt
+ fi # !dbg
+ done
+ # Start seasonal means first, then wait() for monthly regridding to finish
+ fi # !rgr_map
+
+ # Block 2: Climatological seasonal means
+ # Block 2 Loop 1: Generate seasonal commands
+ printf "Climatological seasonal means...\n"
+ cmd_clm[13]="${cmd_mpi[13]} ncra --cb -O -w 31,30,31 ${nco_opt} ${fl_out[3]} ${fl_out[4]} ${fl_out[5]} ${fl_out[13]}"
+ cmd_clm[14]="${cmd_mpi[14]} ncra --cb -O -w 30,31,31 ${nco_opt} ${fl_out[6]} ${fl_out[7]} ${fl_out[8]} ${fl_out[14]}"
+ cmd_clm[15]="${cmd_mpi[15]} ncra --cb -O -w 30,31,30 ${nco_opt} ${fl_out[9]} ${fl_out[10]} ${fl_out[11]} ${fl_out[15]}"
+ cmd_clm[16]="${cmd_mpi[16]} ncra --cb -O -w 31,31,28 ${nco_opt} ${fl_out[12]} ${fl_out[1]} ${fl_out[2]} ${fl_out[16]}"
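+ # NB: -w weights above are days per month in each season: MAM=31,30,31; JJA=30,31,31; SON=30,31,30; DJF=31,31,28 (no leap day)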
-# Block 2: Loop 4: Regrid seasonal files. Load-balance by using idle nodes (nodes not used for annual mean).
-if [ -n "${rgr_map}" ]; then
- printf "Regrid seasonal data...\n"
+ # Block 2 Loop 2: Execute and/or echo seasonal timeseries commands
for ((clm_idx=13;clm_idx<=16;clm_idx++)); do
- let nd_idx=$(((clm_idx-1+4) % nd_nbr))
- if [ ${nd_idx} -lt 4 ]; then
- let nd_idx=${nd_idx}+4
- fi # !nd
- cmd_rgr[${clm_idx}]="${cmd_mpi[${nd_idx}]} ncks -t ${thr_nbr} -O ${nco_opt} ${rgr_opt} ${fl_out[${clm_idx}]} ${fl_rgr[${clm_idx}]}"
- if [ "${mdl_typ}" = 'mpas' ]; then
- cmd_rgr[${clm_idx}]="${cmd_mpi[${nd_idx}]} ncremap -C -u .pid${spt_pid}.climo.${clm_idx}.tmp -P mpas -t ${thr_nbr} -m ${rgr_map} -i ${fl_out[${clm_idx}]} -o ${fl_rgr[${clm_idx}]}"
- fi # !mdl_typ
if [ ${dbg_lvl} -ge 1 ]; then
- echo ${cmd_rgr[${clm_idx}]}
+ echo ${cmd_clm[${clm_idx}]}
fi # !dbg
if [ ${dbg_lvl} -le 1 ]; then
if [ -z "${par_opt}" ]; then
- eval ${cmd_rgr[${clm_idx}]}
+ eval ${cmd_clm[${clm_idx}]}
if [ $? -ne 0 ]; then
- printf "${spt_nm}: ERROR seasonal regrid cmd_rgr[${clm_idx}] failed. Debug this:\n${cmd_rgr[${clm_idx}]}\n"
+ printf "${spt_nm}: ERROR seasonal climo cmd_clm[${clm_idx}] failed. Debug this:\n${cmd_clm[${clm_idx}]}\n"
exit 1
fi # !err
else # !par_opt
- eval ${cmd_rgr[${clm_idx}]} ${par_opt}
- rgr_pid[${clm_idx}]=$!
+ eval ${cmd_clm[${clm_idx}]} ${par_opt}
+ clm_pid[${clm_idx}]=$!
fi # !par_opt
fi # !dbg
- done
- # Start annual mean first, then wait() for seasonal regridding to finish
-fi # !rgr_map
-
-# Block 3: Climatological annual mean (seventeenth file)
-printf "Climatological annual mean...\n"
-cmd_clm[17]="${cmd_mpi[17]} ncra --c2b -O -w 92,92,91,90 ${nco_opt} ${fl_out[13]} ${fl_out[14]} ${fl_out[15]} ${fl_out[16]} ${fl_out[17]}"
-if [ ${dbg_lvl} -ge 1 ]; then
- echo ${cmd_clm[17]}
-fi # !dbg
-if [ ${dbg_lvl} -le 1 ]; then
- if [ -z "${par_opt}" ]; then
- eval ${cmd_clm[17]}
- if [ $? -ne 0 ]; then
- printf "${spt_nm}: ERROR annual climo cmd_clm[17] failed. Debug this:\n${cmd_clm[17]}\n"
- exit 1
- fi # !err
- else # !par_opt
- eval ${cmd_clm[17]} ${par_opt}
- clm_pid[17]=$!
- fi # !par_opt
-fi # !dbg
-# wait() for seasonal regridding, if any, to finish
-if [ -n "${rgr_map}" ]; then
+ done # !clm_idx
+ # wait() for monthly regridding, if any, to finish
+ if [ -n "${rgr_map}" ]; then
+ if [ -n "${par_opt}" ]; then
+ for ((clm_idx=1;clm_idx<=12;clm_idx++)); do
+ wait ${rgr_pid[${clm_idx}]}
+ if [ $? -ne 0 ]; then
+ printf "${spt_nm}: ERROR monthly regrid cmd_rgr[${clm_idx}] failed. Debug this:\n${cmd_rgr[${clm_idx}]}\n"
+ exit 1
+ fi # !err
+ done # !clm_idx
+ fi # !par_opt
+ fi # !rgr_map
+ # wait() for seasonal climatologies to finish
if [ -n "${par_opt}" ]; then
for ((clm_idx=13;clm_idx<=16;clm_idx++)); do
- wait ${rgr_pid[${clm_idx}]}
+ wait ${clm_pid[${clm_idx}]}
if [ $? -ne 0 ]; then
- printf "${spt_nm}: ERROR seasonal regrid cmd_rgr[${clm_idx}] failed. Debug this:\n${cmd_rgr[${clm_idx}]}\n"
+ printf "${spt_nm}: ERROR seasonal climo cmd_clm[${clm_idx}] failed. Debug this:\n${cmd_clm[${clm_idx}]}\n"
exit 1
fi # !err
done # !clm_idx
fi # !par_opt
-fi # !rgr_map
-# wait() for annual climatology to finish
-if [ -n "${par_opt}" ]; then
- wait ${clm_pid[17]}
- if [ $? -ne 0 ]; then
- printf "${spt_nm}: ERROR annual climo cmd_clm[17] failed. Debug this:\n${cmd_clm[17]}\n"
- exit 1
- fi # !err
-fi # !par_opt
-
-# Block 5: Regrid climatological annual mean
-if [ -n "${rgr_map}" ]; then
- printf "Regrid annual data...\n"
- for ((clm_idx=17;clm_idx<=17;clm_idx++)); do
- cmd_rgr[${clm_idx}]="${cmd_mpi[${clm_idx}]} ncks -t ${thr_nbr} -O ${nco_opt} ${rgr_opt} ${fl_out[${clm_idx}]} ${fl_rgr[${clm_idx}]}"
- if [ "${mdl_typ}" = 'mpas' ]; then
- cmd_rgr[${clm_idx}]="${cmd_mpi[${clm_idx}]} ncremap -C -u .pid${spt_pid}.climo.${clm_idx}.tmp -P mpas -t ${thr_nbr} -m ${rgr_map} -i ${fl_out[${clm_idx}]} -o ${fl_rgr[${clm_idx}]}"
- fi # !mdl_typ
- if [ ${dbg_lvl} -ge 1 ]; then
- echo ${cmd_rgr[${clm_idx}]}
- fi # !dbg
- if [ ${dbg_lvl} -le 1 ]; then
- # NB: Do not background climatological mean regridding
- eval ${cmd_rgr[${clm_idx}]}
- if [ $? -ne 0 ]; then
- printf "${spt_nm}: ERROR annual regrid cmd_rgr[${clm_idx}] failed. Debug this:\n${cmd_rgr[${clm_idx}]}\n"
- exit 1
- fi # !err
- fi # !dbg
- done
-fi # !rgr_map
-
-# Link ACME-climo to AMWG-climo filenames
-# drc_pwd is always fully qualified path but drc_out and drc_rgr may be relative paths
-# Strategy: Start in drc_pwd, cd to drc_rgr, then link so return code comes from ln not cd
-if [ ${lnk_flg} = 'Yes' ]; then
- printf "Link ACME-climo to AMWG-climo filenames...\n"
- for ((clm_idx=1;clm_idx<=17;clm_idx++)); do
- if [ -n "${rgr_map}" ]; then
- cmd_lnk[${clm_idx}]="cd ${drc_pwd};cd ${drc_rgr};ln -s -f ${fl_rgr[${clm_idx}]/${drc_rgr}\//} ${fl_amwg[${clm_idx}]/${drc_rgr}\//}"
- else
- cmd_lnk[${clm_idx}]="cd ${drc_pwd};cd ${drc_out};ln -s -f ${fl_out[${clm_idx}]/${drc_out}\//} ${fl_amwg[${clm_idx}]/${drc_out}\//}"
- fi # !rgr_map
- if [ ${dbg_lvl} -ge 1 ]; then
- echo ${cmd_lnk[${clm_idx}]}
- fi # !dbg
- if [ ${dbg_lvl} -le 1 ]; then
- eval ${cmd_lnk[${clm_idx}]}
+ wait
+
+ # Block 2: Loop 4: Regrid seasonal files. Load-balance by using idle nodes (nodes not used for annual mean).
+ if [ -n "${rgr_map}" ]; then
+ printf "Regrid seasonal data...\n"
+ for ((clm_idx=13;clm_idx<=16;clm_idx++)); do
+ let nd_idx=$(((clm_idx-1+4) % nd_nbr))
+ if [ ${nd_idx} -lt 4 ]; then
+ let nd_idx=${nd_idx}+4
+ fi # !nd
+ cmd_rgr[${clm_idx}]="${cmd_mpi[${nd_idx}]} ncks -t ${thr_nbr} -O ${nco_opt} ${rgr_opt} ${fl_out[${clm_idx}]} ${fl_rgr[${clm_idx}]}"
+ if [ "${mdl_typ}" = 'mpas' ]; then
+ cmd_rgr[${clm_idx}]="${cmd_mpi[${nd_idx}]} ncremap -C -u .pid${spt_pid}.climo.${clm_idx}.tmp -P mpas -t ${thr_nbr} -m ${rgr_map} -i ${fl_out[${clm_idx}]} -o ${fl_rgr[${clm_idx}]}"
+ fi # !mdl_typ
+ if [ ${dbg_lvl} -ge 1 ]; then
+ echo ${cmd_rgr[${clm_idx}]}
+ fi # !dbg
+ if [ ${dbg_lvl} -le 1 ]; then
+ if [ -z "${par_opt}" ]; then
+ eval ${cmd_rgr[${clm_idx}]}
+ if [ $? -ne 0 ]; then
+ printf "${spt_nm}: ERROR seasonal regrid cmd_rgr[${clm_idx}] failed. Debug this:\n${cmd_rgr[${clm_idx}]}\n"
+ exit 1
+ fi # !err
+ else # !par_opt
+ eval ${cmd_rgr[${clm_idx}]} ${par_opt}
+ rgr_pid[${clm_idx}]=$!
+ fi # !par_opt
+ fi # !dbg
+ done
+ # Start annual mean first, then wait() for seasonal regridding to finish
+ fi # !rgr_map
+
+ # Block 3: Climatological annual mean (seventeenth file)
+ printf "Climatological annual mean...\n"
+ cmd_clm[17]="${cmd_mpi[17]} ncra --c2b -O -w 92,92,91,90 ${nco_opt} ${fl_out[13]} ${fl_out[14]} ${fl_out[15]} ${fl_out[16]} ${fl_out[17]}"
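+ # NB: -w weights are days per season (MAM=92, JJA=92, SON=91, DJF=90) in a 365-day year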
+ if [ ${dbg_lvl} -ge 1 ]; then
+ echo ${cmd_clm[17]}
+ fi # !dbg
+ if [ ${dbg_lvl} -le 1 ]; then
+ if [ -z "${par_opt}" ]; then
+ eval ${cmd_clm[17]}
if [ $? -ne 0 ]; then
- printf "${spt_nm}: ERROR linking ACME to AMWG filename cmd_lnk[${clm_idx}] failed. Debug this:\n${cmd_lnk[${clm_idx}]}\n"
+ printf "${spt_nm}: ERROR annual climo cmd_clm[17] failed. Debug this:\n${cmd_clm[17]}\n"
exit 1
fi # !err
- fi # !dbg
- done # !clm_idx
- cd ${drc_pwd}
-fi # !lnk_flg
+ else # !par_opt
+ eval ${cmd_clm[17]} ${par_opt}
+ clm_pid[17]=$!
+ fi # !par_opt
+ fi # !dbg
+ # wait() for seasonal regridding, if any, to finish
+ if [ -n "${rgr_map}" ]; then
+ if [ -n "${par_opt}" ]; then
+ for ((clm_idx=13;clm_idx<=16;clm_idx++)); do
+ wait ${rgr_pid[${clm_idx}]}
+ if [ $? -ne 0 ]; then
+ printf "${spt_nm}: ERROR seasonal regrid cmd_rgr[${clm_idx}] failed. Debug this:\n${cmd_rgr[${clm_idx}]}\n"
+ exit 1
+ fi # !err
+ done # !clm_idx
+ fi # !par_opt
+ fi # !rgr_map
+ # wait() for annual timeseries to finish
+ if [ -n "${par_opt}" ]; then
+ wait ${clm_pid[17]}
+ if [ $? -ne 0 ]; then
+ printf "${spt_nm}: ERROR annual climo cmd_clm[17] failed. Debug this:\n${cmd_clm[17]}\n"
+ exit 1
+ fi # !err
+ fi # !par_opt
-# Incremental climos
-if [ -n "${yr_srt_prv}" ]; then
+ # Block 5: Regrid climatological annual mean
+ if [ -n "${rgr_map}" ]; then
+ printf "Regrid annual data...\n"
+ for ((clm_idx=17;clm_idx<=17;clm_idx++)); do
+ cmd_rgr[${clm_idx}]="${cmd_mpi[${clm_idx}]} ncks -t ${thr_nbr} -O ${nco_opt} ${rgr_opt} ${fl_out[${clm_idx}]} ${fl_rgr[${clm_idx}]}"
+ if [ "${mdl_typ}" = 'mpas' ]; then
+ cmd_rgr[${clm_idx}]="${cmd_mpi[${clm_idx}]} ncremap -C -u .pid${spt_pid}.climo.${clm_idx}.tmp -P mpas -t ${thr_nbr} -m ${rgr_map} -i ${fl_out[${clm_idx}]} -o ${fl_rgr[${clm_idx}]}"
+ fi # !mdl_typ
+ if [ ${dbg_lvl} -ge 1 ]; then
+ echo ${cmd_rgr[${clm_idx}]}
+ fi # !dbg
+ if [ ${dbg_lvl} -le 1 ]; then
+ # NB: Do not background climatological mean regridding
+ eval ${cmd_rgr[${clm_idx}]}
+ if [ $? -ne 0 ]; then
+ printf "${spt_nm}: ERROR annual regrid cmd_rgr[${clm_idx}] failed. Debug this:\n${cmd_rgr[${clm_idx}]}\n"
+ exit 1
+ fi # !err
+ fi # !dbg
+ done
+ fi # !rgr_map
+
+ # Link ACME-climo to AMWG-climo filenames
+ # drc_pwd is always fully qualified path but drc_out and drc_rgr may be relative paths
+ # Strategy: Start in drc_pwd, cd to drc_rgr, then link so return code comes from ln not cd
+ if [ ${lnk_flg} = 'Yes' ]; then
+ printf "Link ACME-climo to AMWG-climo filenames...\n"
+ for ((clm_idx=1;clm_idx<=17;clm_idx++)); do
+ if [ -n "${rgr_map}" ]; then
+ cmd_lnk[${clm_idx}]="cd ${drc_pwd};cd ${drc_rgr};ln -s -f ${fl_rgr[${clm_idx}]/${drc_rgr}\//} ${fl_amwg[${clm_idx}]/${drc_rgr}\//}"
+ else
+ cmd_lnk[${clm_idx}]="cd ${drc_pwd};cd ${drc_out};ln -s -f ${fl_out[${clm_idx}]/${drc_out}\//} ${fl_amwg[${clm_idx}]/${drc_out}\//}"
+ fi # !rgr_map
+ if [ ${dbg_lvl} -ge 1 ]; then
+ echo ${cmd_lnk[${clm_idx}]}
+ fi # !dbg
+ if [ ${dbg_lvl} -le 1 ]; then
+ eval ${cmd_lnk[${clm_idx}]}
+ if [ $? -ne 0 ]; then
+ printf "${spt_nm}: ERROR linking ACME to AMWG filename cmd_lnk[${clm_idx}] failed. Debug this:\n${cmd_lnk[${clm_idx}]}\n"
+ exit 1
+ fi # !err
+ fi # !dbg
+ done # !clm_idx
+ cd ${drc_pwd}
+ fi # !lnk_flg
+fi # !bnr_flg
+
+# Extended climos
+if [ "${xtn_flg}" = 'Yes' ]; then
mkdir -p ${drc_prv}
mkdir -p ${drc_xtn}
trim_leading_zeros ${yr_srt_prv}
yr_srt_rth_prv=${sng_trm}
yyyy_srt_prv=`printf "%04d" ${yr_srt_rth_prv}`
+ yyyy_clm_srt_dec_prv=${yyyy_srt_prv}
let yr_srtm1_prv=${yr_srt_rth_prv}-1
- let yr_end_prv=${yr_srt_rth}-1
+ if [ "${ncr_flg}" = 'Yes' ]; then
+ let yr_end_prv=${yr_srt_rth}-1
+ fi # !ncr_flg
trim_leading_zeros ${yr_end_prv}
yr_end_rth_prv=${sng_trm}
yyyy_end_prv=`printf "%04d" ${yr_end_rth_prv}`
let yr_endm1_prv=${yr_end_rth_prv}-1
let yr_nbr_prv=${yr_end_rth_prv}-${yr_srt_rth_prv}+1
- let yr_nbr_xtn=${yr_end_rth}-${yr_srt_rth_prv}+1
- yyyy_clm_srt_dec_prv=${yyyy_srt_prv}
+ let yr_nbr_xtn=${yr_nbr_prv}+${yr_nbr}
+
wgt_prv=$(echo "${yr_nbr_prv}/${yr_nbr_xtn}" | bc -l)
wgt_crr=$(echo "${yr_nbr}/${yr_nbr_xtn}" | bc -l)
- printf "Incrementally produce extended climatology as weighted average of previous and current climatologies:\n"
- printf "Previous climatology is ${yr_nbr_prv} years from ${yyyy_clm_srt_dec_prv}${mm_ann_srt} to ${yyyy_end_prv}${mm_ann_end}, weight = ${wgt_prv}\n"
- printf "Current climatology is ${yr_nbr} years from ${yyyy_clm_srt_dec}${mm_ann_srt} to ${yyyy_end}${mm_ann_end}, weight = ${wgt_crr}\n"
- printf "Extended climatology is ${yr_nbr_xtn} years from ${yyyy_clm_srt_dec_prv}${mm_ann_srt} to ${yyyy_end}${mm_ann_end}\n"
+ if [ "${bnr_flg}" = 'Yes' ]; then
+ printf "Produce extended timeseries as weighted average of two previously computed climatologies:\n"
+ else # !bnr_flg
+ printf "Produce extended timeseries as weighted average of previously computed and incremental/new climatologies:\n"
+ fi # !bnr_flg
+ printf "Previous/first timeseries is ${yr_nbr_prv} years from ${yyyy_clm_srt_dec_prv}${mm_ann_srt} to ${yyyy_end_prv}${mm_ann_end}, weight = ${wgt_prv}\n"
+ printf "Current/second timeseries is ${yr_nbr} years from ${yyyy_clm_srt_dec}${mm_ann_srt} to ${yyyy_end}${mm_ann_end}, weight = ${wgt_crr}\n"
+ printf "Extended timeseries is ${yr_nbr_xtn} years from ${yyyy_clm_srt_dec_prv}${mm_ann_srt} to ${yyyy_end}${mm_ann_end}\n"
# Replace yr_srt by yr_srt_prv in "yrs_averaged" attribute
nco_opt="${nco_opt/${yr_srt}-/${yr_srt_prv}-}"
@@ -1142,7 +1189,7 @@ if [ -n "${yr_srt_prv}" ]; then
fi # !clm_md
done # !clm_idx
- printf "Weight previous and new native-grid climos to produce extended climo...\n"
+ printf "Weight input climos to produce extended climo...\n"
for ((clm_idx=1;clm_idx<=17;clm_idx++)); do
cmd_xtn[${clm_idx}]="${cmd_mpi[${clm_idx}]} ncflint -O ${nco_opt} -w ${wgt_prv},${wgt_crr} ${fl_prv[${clm_idx}]} ${fl_out[${clm_idx}]} ${fl_xtn[${clm_idx}]}"
if [ ${dbg_lvl} -ge 1 ]; then
@@ -1173,7 +1220,7 @@ if [ -n "${yr_srt_prv}" ]; then
wait
if [ -n "${rgr_map}" ]; then
- printf "Weight previous and new regridded climos to produce extended regridded climo...\n"
+ printf "Weight input climos to produce extended regridded climo...\n"
for ((clm_idx=1;clm_idx<=17;clm_idx++)); do
cmd_rgr_xtn[${clm_idx}]="${cmd_mpi[${clm_idx}]} ncflint -O ${nco_opt} -w ${wgt_prv},${wgt_crr} ${fl_rgr_prv[${clm_idx}]} ${fl_rgr[${clm_idx}]} ${fl_rgr_xtn[${clm_idx}]}"
if [ ${dbg_lvl} -ge 1 ]; then
@@ -1229,10 +1276,14 @@ if [ -n "${yr_srt_prv}" ]; then
cd ${drc_pwd}
fi # !lnk_flg
-fi # !yr_srt_prv incremental climos
+else # !xtn_flg extended climos
+
+ yr_nbr_xtn=${yr_nbr}
+
+fi # !xtn_flg extended climos
date_end=$(date +"%s")
-printf "Completed climatology generation for dataset ${caseid} at `date`.\n"
+printf "Completed ${yr_nbr_xtn}-year timeseries generation for dataset ${caseid} at `date`.\n"
date_dff=$((date_end-date_srt))
echo "Quick plots of climatological annual mean:"
if [ -n "${yr_srt_prv}" ]; then
diff --git a/data/tst-udunits.nco b/data/tst-udunits.nco
new file mode 100644
index 0000000..0e0c0f3
--- /dev/null
+++ b/data/tst-udunits.nco
@@ -0,0 +1,136 @@
+// Count number of errors
+nbr_err=0;
+nbr_err_ttl=0;
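+// One way to exercise this script (assumed paths, sketch only):
+// ncap2 -O -v -S ~/nco/data/tst-udunits.nco ~/nco/data/in.nc ~/foo.nc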
+
+// time units first
+{
+ // rebase coordinate both with "days"
+ a1=time;
+ a1@units="days since 1970-1-1";
+ a1@calendar="gregorian";
+
+ a2=udunits(a1, "days since 1970-1-2");
+ a2@units="days since 1970-1-2";
+
+
+ if( fabs( (a1-a2).avg() -1.0 ) > 0.01d )
+ {
+ print("ERROR: a1: problem rebasing time coordinate a2\n");
+ nbr_err++;
+ }
+
+ // rebase coordinate both with "days" - 30 days a month calendar
+ a3=time.float();
+ a3@units="days since 1970-1-29";
+ a3@calendar="360_day";
+
+ a4=udunits(a3, "days since 1970-2-1");
+ a4@units="days since 1970-2-1";
+
+
+ if( fabs( (a3-a4).avg() -2.0f ) > 0.01f )
+ {
+ print("ERROR: a3: problem rebasing time coord cal(360_day) a4\n");
+ nbr_err++;
+ }
+
+ // rebase coordinate both with "days" - 365_day calendar
+ // NB: in a "normal" (gregorian) calendar the difference would be 3 days owing to the leap day, but here it is 2 days
+
+ a5=time;
+
+ a5@units="days since 2012-2-27";
+ a5@calendar="365_day";
+
+
+ @units="days since 2012-03-01";
+ a6=udunits(a5, @units);
+ a6@units=@units;
+
+ if( fabs( (a5-a6).avg() -2.0 ) > 0.01d )
+ {
+ print("ERROR: a5: problem re-basing time coord cal(365_day) a6\n");
+ nbr_err++;
+ }
+
+ // rebase coordinate from hours to minutes
+ a7=time;
+ a7@units="hours since 2012-01-01";
+ a7@calendar="gregorian";
+
+ a8@units="minutes since 2012-01-01";
+
+ a8=udunits(a7, a8@units);
+ a8@units="minutes since 2012-01-01";
+
+
+ if( fabs(a8.sum()-3300.0) >0.1 )
+ {
+ print("ERROR: a7: change time coord units from hours to days a7\n");
+ nbr_err++;
+ }
+
+
+ // rebase coordinate from days to hours - 360_day calendar
+ a9=time;
+ a9@units="days since 2012-01-30";
+ a9@calendar="360_day";
+
+ @units="hours since 2012-02-01 01:00";
+
+ a10=udunits(a9, @units);
+ a10@units=@units;
+ a10@calendar="360_day";
+
+
+ if( fabs(a10.sum()-1070.0) >0.1 )
+ {
+ print("ERROR: a9: change time coord units from days to hours (360_calendar)\n");
+ nbr_err++;
+ }
+
+
+ // rebase coordinate from minutes to seconds - 365_day calendar
+ a11=time;
+ // NB: this calendar has no leap year
+ a11@units="minutes since 2012-02-28 23:58:00.00";
+ a11@calendar="365_day";
+
+
+ @units="seconds since 2012-03-01 00:00";
+ a12=udunits(a11, @units);
+ a12 at units=@units;
+ a12 at calendar="365_day";
+
+
+ if( fabs(a12.sum()-2100.0) >0.1 )
+ {
+ print("ERROR: a11: change time coord units from mins to seconds (365_calendar)\n");
+ nbr_err++;
+ }
+
+// other units
+{
+
+ T[lon]={0.0, 100.0, 150.0, 200.0};
+ T at units="Celsius";
+
+ @units="kelvin";
+ // over-write var
+ T=udunits(T, @units);
+ T@units=@units;
+
+
+ if( fabs(T.sum()-1542.6) >0.1 )
+ {
+ print("ERROR: T: change Temperature units from Celsius to Kelvin)\n");
+ nbr_err++;
+ }
+
+
+
+}
+
+
+
+}
\ No newline at end of file
diff --git a/debian/changelog b/debian/changelog
index aa4ea0e..22d25b5 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,9 @@
+nco (4.6.3-1) unstable; urgency=low
+
+ * new upstream version rebasing, long options, JSON brackets, udunits(), binary/annual climos
+
+ -- Charlie Zender <zender at uci.edu> Fri, 23 Dec 2016 12:25:17 -0700
+
nco (4.6.2-1) unstable; urgency=low
* new upstream version JSON, multi-args, better behavior in modules
diff --git a/debian/doc-base b/debian/doc-base
index e994703..1f1f074 100644
--- a/debian/doc-base
+++ b/debian/doc-base
@@ -1,5 +1,5 @@
Document: nco
-Title: NCO User's Guide
+Title: NCO Users Guide
Author: zender at uci.edu
Abstract: This manual describes the netCDF Operators, NCO.
NCO is a suite of programs, or operators, that take netCDF files as
diff --git a/doc/ANNOUNCE b/doc/ANNOUNCE
index fd1a143..01b3e13 100644
--- a/doc/ANNOUNCE
+++ b/doc/ANNOUNCE
@@ -1,105 +1,117 @@
$Header$ -*-text-*-
-The netCDF Operators NCO version 4.6.2 are ready.
+The netCDF Operators NCO version 4.6.3 are ready.
http://nco.sf.net (Homepage, Mailing lists)
http://github.com/nco (Source Code, Releases, Developers)
What's new?
-4.6.2 is mainly a stability release to polish existing features and to
-add minor new ones. The exception is the new JSON backend.
-NCO now prints CDL, "Traditional NCO", XML, and JSON. Babel-icious eh?
+4.6.3 adds many new convenience features to existing functionality
+like JSON, ncap2, ncremap, and ncclimo. Multi-dimensional bracketing
+completes our JSON implementation. ncap2 adds a convenient UDUnits
+conversion function. ncremap and ncclimo support long options.
+ncclimo supports binary climatology generation and annual-mean mode.
-We built the JSON backend to help a project (DOE Terraref).
-Our first choice was to adopt an off-the-shelf netCDF->JSON tool.
-However, no existing solution worked for us.
-JSON is a loose syntax, and we made necessary design choices that
-suited our application, and left some choices for later.
-Are there syntactical variants you want us to add?
-
-Some users of netCDF version 4.4.1 cannot build NCO from scratch
-because a bug in the nc-config command kills NCO's 'configure;make'.
-Unidata will ship a corrected nc-config in netCDF 4.4.2.
-The 4.4.2-development branch already contains the necessary fix.
-
-Work on NCO 4.6.3 has commenced. Planned improvements include more
-flexibility in handling extensive variables during regridding, CMake
-support, and brackets for multi-dimensional array values in JSON.
+Work on NCO 4.6.4 has commenced. Planned improvements include CMake
+builds and more flexibility in handling extensive variables during
+regridding.
Enjoy,
Charlie
NEW FEATURES (full details always in ChangeLog):
-A. JSON backend
- Similar the CDL and XML backends, ncks now supports JSON.
- To obtain JSON, use --jsn or --json instead of --cdl/--xml.
- ncks --json -v one ~/nco/data/in.nc
- {
- "variables": {
- "one": {
- "type": "float",
- "attributes": {
- "long_name": "one"
- },
- "data": 1.0
- }
- }
- }
- Other related options (-v, -g, -m, -M) have their usual meanings.
- The --jsn_att_fmt switch accesses formats of selectable verbosity.
- Thanks to Henry Butowsky for implementing the JSON backend,
- and to Pedro Vicente and Chris Barker for helpful discussions.
+A. ncclimo supports "binary climos" and annual-mean mode.
+ Binary climos are climos created by merging two existing climos,
+ rather than re-computing a climo from raw input.
+ This saves disk space and time for long climos.
+ Annual-mean mode allows ncclimo to process input files that are
+ annual rather than monthly means.
+ http://nco.sf.net/nco.html#ncclimo
+
+B. ncrcat and ncra now re-base data (move to a common time origin)
+ from arbitrary time units in multiple calendar systems.
+ Previously, re-basing only worked when the basetime (i.e., the
+ YYMMDD in units like "XXX since YYMMDD") changed.
+ Now rebasing takes into account the full units, both the increment
+ (XXX) and the basetime (YYMMDD). Thanks to Dave Allured for the
+ suggestion and Henry Butowsky for the re-implementation.
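+ A brief illustration (hypothetical filenames): if in1.nc stores time
+ as "days since 1979-1-1" and in2.nc stores it as "hours since 1980-1-1",
+ then
+ ncrcat -O in1.nc in2.nc out.nc
+ should now express the second file's time values in the first file's
+ units before concatenation.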
+ http://nco.sf.net/nco.html#rbs
+
+C. ncap2 now supports converting data between any two compatible units
+ systems supported by UDUnits. The udunits() function takes an
+ input variable and a UDUnits-compatible units string.
+ T[lon]={0.0,100.0,150.0,200.0};
+ T@units="Celsius";
+ T=udunits(T,"kelvin");
+ print(T);
+ 273.15, 373.15, 423.15, 473.15 ;
+ The method auto-magically reads var_in@units and var_in@calendar
+ (so, YES, this works with dates) attributes as necessary.
+ Thanks to Henry Butowsky for this feature.
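+ As a command-line sketch (assuming in.nc holds a variable T with
+ T@units already set, and filenames are illustrative):
+ ncap2 -O -s 'T=udunits(T,"kelvin");T@units="kelvin"' in.nc out.nc
+ The output units attribute is set explicitly here, as the
+ tst-udunits.nco tests also do.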
+ http://nco.sf.net/nco.html#udunits_fnc
+
+D. ncclimo and ncremap now support long options, e.g.,
+ ncclimo --case=caseid --input=drc_in --output=drc_out --map=rgr_map
+ ncremap --input_file=in_fl --destination=dst_fl --output_file=out_fl
+ http://nco.sf.net/nco.html#ncclimo
+ http://nco.sf.net/nco.html#ncremap
+
+E. ncclimo and ncremap now save the full command line with which they
+ were invoked as a single global attribute.
+ Previously portions were saved as separate attributes.
+ The new attributes are climo_command and remap_command.
+ Re-executing their contents will exactly replicate (except for
+ datestamps) the climatologies or regridded files that store them.
+ This improved provenance comes at the cost of up to a few kB more
+ metadata in each file.
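+ One way to inspect the stored provenance (filename illustrative) is
+ to print the global metadata, e.g.,
+ ncks -M climo_file.nc | grep climo_command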
+
+F. ncks can now print attribute CDL types as comments.
+ CDL attribute types can be hard for humans to discern, so now
+ ncks will print the type when invoked with -D 1 or greater.
+ The printed file is fully CDL-compliant and works with ncgen.
+ Credit to whoever first thought of this feature and implemented
+ it in some CDL output someone sent me.
+ zender@firn:~/nco$ ncks -D 1 -C --cdl ~/nco/data/in_4.nc
+ ...
+ att_var:byte_att = 0b, 1b, 2b, 127b, -128b, -127b, -2b, -1b ; // byte
+ att_var:char_att = "Sentence one.\n",
+ "Sentence two.\n" ; // char
+ att_var:short_att = 37s ; // short
+ att_var:int_att = 73 ; // int
+ att_var:float_att = 73.f, 72.f, 71.f, 70.01f, 69.001f, 68.01f, 67.01f ; // float
+ att_var:double_att = 73., 72., 71., 70.01, 69.001, 68.01, 67.010001 ; // double
+ att_var:ubyte_att = 0ub, 1ub, 2ub, 127ub, 128ub, 254ub, 255ub, 0ub ; // ubyte
+ att_var:ushort_att = 37us ; // ushort
+ att_var:uint_att = 73ul ; // uint
+
+G. JSON brackets
+ Similar to the CDL and XML backends, ncks supports JSON (as of 4.6.2).
+ ncks now prints strided brackets to demarcate inner dimensions
+ of multi-dimensional variable data.
+ Invoking with --json vs. --jsn_fmt=4 on foo(2,3,4) yields:
+ "data": [[[0.0, 1.0, 2.0, 3.0], [4.0, 5.0, 6.0, 7.0], [8.0, 9.0,
+ 10.0, 11.0]], [[12.0, 13.0, 14.0, 15.0], [16.0, 17.0, 18.0, 19.0],
+ [20.0, 21.0, 22.0, 23.0]]]
+ "data": [0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0,
+ 11.0, 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0,
+ 22.0, 23.0]
+ Bracketed data are suitable for pasting into Python.
+ More sample output at:
+ http://dust.ess.uci.edu/tmp/in.json and other *.json files.
+ Thanks to Henry Butowsky for implementing the bracketing.
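+ For example, comparing
+ ncks --json -v three_dmn_var ~/nco/data/in.nc
+ with the --jsn_fmt=4 variant shows the two layouts above
+ (variable name is only a guess at a suitable multi-dimensional field).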
http://nco.sf.net/nco.html#json
-A. Multi-arguments refer to the format of command-line key-value pairs
- that NCO uses to support four different features that can have too
- many parameters to set via traditional command line arguments.
- The four features are regridding (rgr), global-attribute-adding
- (gaa), precision-preserving compression (ppc), and ENVI image
- processing for the DOE Terraref project (trr). Formerly, users were
- required to supply one value per command line option for these
- features, e.g.,
- "--rgr lat_nbr=64 --rgr lon_nbr=128 --rgr lat_grd=cap ..."
- The new multi-argument feature allows users to aggregate multiple
- key-value pairs per command line argument, e.g.,
- "--rgr lat_nbr=64#lon_nbr=128#lat_grd=cap# ..."
- Now all key-value pairs related to a single feature (like
- regridding, compression) can be provided as one arument!
-
- The string that delimits each pair is user-configurable and
- defaults to a single hash "#". Change it with, e.g., --dlm=":".
- Ensure your delimiters are not special shell characters, and are
- escaped (with backslashes) if present in key or value strings.
- Thanks to Jerome Mao for implementing multi-arguments.
- http://nco.sf.net/nco.html#mta
-
-A. All builds tested on LLVM 8.0.0 and GCC 6.2.1 toolchains.
-
BUG FIXES:
-A. ncclimo and ncremap could fail if ${BASH_SOURCE[0]} is unavailable.
- This could occur, e.g., with the dash shell and/or due to software
- modules that modify users' environments.
- This portability issue has been resolved.
-
-B. ncatted failed to implement NaN on some MinGW systems with, e.g.,
- ncatted -O -a _FillValue,fll_val,m,f,nan ~/nco/data/in.nc ~/foo.nc
- We think this has been fixed. The workaround is to upgrade or use
- ncatted on *NIX or MacOS. Thanks to James Adams for reporting this.
-
-C. ncks CDL now prints correct tokens for uint, int64, uint64 types.
- These are "ul", "ll", "ull", respectively. Previously NCO used
- different tokens because ncgen <= 4.3.0 choked on "ll" formats.
- So this was an intentional NCO "feature", not a bug.
- Use ncgen >= 4.3.1 to parse these CDL files.
+A. None!
KNOWN PROBLEMS DUE TO NCO:
This section of ANNOUNCE reports and reminds users of the
existence and severity of known, not yet fixed, problems.
- These problems occur with NCO 4.6.2 built/tested under
+ These problems occur with NCO 4.6.3 built/tested under
MacOS 10.12.1 with netCDF 4.4.1 on HDF5 1.8.16 and with
Linux with netCDF 4.4.2-development (20161116) on HDF5 1.8.16.
@@ -159,16 +171,24 @@ B. NOT YET FIXED (netCDF4 library bug)
Bug tracking: https://www.unidata.ucar.edu/jira/browse/fxm
More details: http://nco.sf.net/nco.html#ncrename_crd
-C. NOT YET FIXED (netCDF4 library bug)
+C. FIXED in netCDF Development branch as of 20161116 and in maintenance release 4.4.1.1
nc-config/nf-config produce erroneous switches that cause NCO builds to fail
- This problem (apparently) affects Linux not MacOS with netCDF 4.4.1
+ This problem affects netCDF 4.4.1 on all operating systems.
+ Some pre-compiled netCDF packages may have patched the problem.
+ Hence it does not affect my MacPorts install of netCDF 4.4.1.
Demonstration:
- nc-config --cflags # Produces extraneous text
+ % nc-config --cflags # Produces extraneous text that confuses make
+ Using nf-config: /usr/local/bin/nf-config
+ -I/usr/local/include -I/usr/local/include -I/usr/include/hdf
+
+ If your nc-config output contains the "Using ..." line, you are
+ affected by this issue.
20161029: Reported problem to Unidata
20161101: Unidata confirmed reproducibility, attributed to netCDF 4.4.1 changes
20161116: Unidata patch is in tree for netCDF 4.4.2 release
+ 20161123: Fixed in maintenance release netCDF 4.4.1.1
D. NOT YET FIXED (would require DAP protocol change?)
Unable to retrieve contents of variables including period '.' in name
diff --git a/doc/ChangeLog b/doc/ChangeLog
index 4aeb7fa..a0f3d65 100644
--- a/doc/ChangeLog
+++ b/doc/ChangeLog
@@ -1,8 +1,149 @@
+2016-12-23 Charlie Zender <zender at uci.edu>
+
+ * NCO 4.6.3 release procedure:
+ cd ~/nco;git commit -a -m 'Version 4.6.3 final changes';git push
+ git tag -a 4.6.3 -m 'Version 4.6.3 new features: rebasing, long options, JSON brackets, udunits(), binary/annual climos';git push --tags
+
+2016-12-22 Charlie Zender <zender at uci.edu>
+
+ * Add NC_EACCESS error hints
+
+2016-12-21 Charlie Zender <zender at uci.edu>
+
+ * NCO 4.6.3-beta01 release procedure:
+ cd ~/nco;git commit -a -m 'Version 4.6.3-beta01 final changes';git push
+ git tag -a 4.6.3-beta01 -m 'Version 4.6.3-beta01 new features: tweak JSON';git push --tags
+
+2016-12-20 Charlie Zender <zender at uci.edu>
+
+ * NCO 4.6.3-alpha05 release procedure:
+ cd ~/nco;git commit -a -m 'Version 4.6.3-alpha05 final changes';git push
+ git tag -a 4.6.3-alpha05 -m 'Version 4.6.3-alpha05 new features: long options to ncremap, JSON brackets';git push --tags
+
+2016-12-20 Henry Butowsky <henryb at hush.com>
+
+ * Add --jsn_data_brk to print JSON data array brackets
+
+2016-12-19 Charlie Zender <zender at uci.edu>
+
+ * Name and document long synonyms for ncremap
+
+2016-12-14 Charlie Zender <zender at uci.edu>
+
+ * NCO 4.6.3-alpha04 release procedure:
+ cd ~/nco;git commit -a -m 'Version 4.6.3-alpha04 final changes';git push
+ git tag -a 4.6.3-alpha04 -m 'Version 4.6.3-alpha04 new features: long options to ncclimo';git push --tags
+
+2016-12-13 Charlie Zender <zender at uci.edu>
+
+ * Finish long argument documentation for ncclimo
+
+2016-12-10 Charlie Zender <zender at uci.edu>
+
+ * Add long options to ncclimo. Use Adam Katz's method from StackOverflow.
+
+ * Quote $OPTARG on RHS to handle whitespace correctly
+
+2016-12-9 Henry Butowsky <henryb at hush.com>
+
+ * Re-base when increment (not just basetime) changes between files for calendar=360_day, 365_day
+
+2016-12-06 Charlie Zender <zender at uci.edu>
+
+ * Implement climo_ann.sh functionality in ncclimo and deprecate climo_ann.sh
+
+ * Rename clm_md to dec_md for December mode, and use clm_md for Climatology Mode = ann, mth, dly
+
+2016-12-05 Charlie Zender <zender at uci.edu>
+
+ * NCO 4.6.3-alpha03 release procedure:
+ cd ~/nco;git commit -a -m 'Version 4.6.3-alpha03 final changes';git push
+ git tag -a 4.6.3-alpha03 -m 'Version 4.6.3-alpha03 new features: mpaso, mpascice, CMake, rebasing';git push --tags
+
+ * Cleared up most bugs from new rebasing code
+
+ * Pedro has begun adding CMake support
+
+ * ncra: reset error code in --cb block to prevent exiting with apparent error
+
+ * Update ncclimo for new mpaso, mpascice conventions
+
+ * Pad metadata headers to 10kB with --hdr_pad in ncremap, nccclimo
+
+ * Preserve full command-line (with --gaa ${cmd_ln}) in ncremap, ncclimo
+
+ * Deprecate (temporarily, I hope) _GNU_SOURCE so ncap_yacc builds without stpcpy() problems
+
+ * Patches necessary when UDUNITS not installed
+
+2016-12-04 Charlie Zender <zender at uci.edu>
+
+ * Always return value to non-void functions: good practice and required by rpmlint
+ Define _GNU_SOURCE macro to obtain prototype for GNU extension sngcasestr() from string.h
+ Patches supplied by Manfred Schwarb in nco bugs #94
+
+2016-11-30 Charlie Zender <zender at uci.edu>
+
+ * NCO 4.6.3-alpha02 release procedure:
+ cd ~/nco;git commit -a -m 'Version 4.6.3-alpha02 final changes';git push
+ git tag -a 4.6.3-alpha02 -m 'Version 4.6.3-alpha02 new features: ncclimo binary climatologies';git push --tags
+
+ * Henry implemented ncra re-base when increment, not just basetime, changes between files
+
+ * Make defaults for drc_prv sensible for bnr_flg and ncr_flg in ncclimo
+
+2016-11-29 Charlie Zender <zender at uci.edu>
+
+ * Tested and working on FC25, no mods necessary, yay!
+
+ * Print attribute CDL type as comment when dbg_lvl >= 1
+
+2016-11-25 Charlie Zender <zender at uci.edu>
+
+ * Modify ncclimo to distinguish two types of extended climos: Meta and Incremental
+
+2016-11-22 Charlie Zender <zender at uci.edu>
+
+ * Change NCO_CNK_CSH_BYT_DFL to 0 since it is unsigned
+
+2016-11-21 Charlie Zender <zender at uci.edu>
+
+ * NCO 4.6.3-alpha01 release procedure:
+ cd ~/nco;git commit -a -m 'Version 4.6.3-alpha01 final changes';git push
+ git tag -a 4.6.3-alpha01 -m 'Version 4.6.3-alpha01 new features: bld/Makefile updates for ar, GSL;ncap2 chunking;';git push --tags
+
+ * Add --cnk_csh_byt option and cache diagnostics
+
+ * ncap2 no longer chunks all variables without asking, and follows command-line inputs
+
+2016-11-18 Charlie Zender <zender at uci.edu>
+
+ * Revise and clarify ncwa memory usage in nco.texi
+
+ * [NCO]_GSL_MINOR_VERSION rx fails in fmc_gsl_cls.cc due to new GSL 2.x numbering, breaks bld/Makefile
+
+ * Compiling on grele shows netCDF 4.3.3.1 chokes on CDL naked constant ll, LL, ull, ULL syntax
+ Remove such syntax from in_4.cdl to prevent breakage with bld/Makefile
+
+2016-11-17 Charlie Zender <zender at uci.edu>
+
+ * Excise U from ARFLAGS on rhea/titan/yellowstone (RHEL6) since -U implemented in ar after ~2009
+
+ * Clarify that rebasing feature only works on basetime in response to Dave Allured's question/comment
+
2016-11-16 Charlie Zender <zender at uci.edu>
+ * Merge spelling fixes from Sebastian Couwenberg
+
* NCO 4.6.2 release procedure:
cd ~/nco;git commit -a -m 'Version 4.6.2 final changes';git push
- git tag -a 4.6.2 -m 'Version 4.6.2 new features: multi-argument documentation, ncap2 value_list simplification, JSON indentation';git push --tags
+ git tag -a 4.6.2 -m 'Version 4.6.2 new features: JSON, multi-arguments, module compatibility';git push --tags
+
+ * NB: 4.6.2-beta04 was never tagged or released. Instead it became 4.6.2-final.
+
+ * NCO 4.6.2-beta04 release procedure:
+ cd ~/nco;git commit -a -m 'Version 4.6.2-beta04 final changes';git push
+ git tag -a 4.6.2-beta04 -m 'Version 4.6.2-beta04 new features: multi-argument documentation, ncap2 value_list simplification, JSON indentation';git push --tags
* Henry fixed regular brace staircases in JSON
@@ -6637,10 +6778,10 @@
2011-06-26 Charlie Zender <zender at uci.edu>
* Remove UDUNITS section of configure.in because it was overly
- complex to maintain with UDUNITS2 which supercedes it
+ complex to maintain with UDUNITS2 which supersedes it
* Remove DAP-OPENDAP section of configure.in because it was overly
- complex to maintain with DAP-NETCDF which supercedes it
+ complex to maintain with DAP-NETCDF which supersedes it
* Re-introduce ENABLE_NETCDF4
diff --git a/doc/MANIFEST b/doc/MANIFEST
index 8526641..d99081f 100644
--- a/doc/MANIFEST
+++ b/doc/MANIFEST
@@ -152,13 +152,13 @@ nco/doc/mpi.txt MPI Environments for NCO
nco/doc/my-bib-macros.texi TeXInfo bibliography macros for nco.texi
nco/doc/nasa.png NASA logo (PNG format)
nco/doc/ncap.txt Description of the ncap operator (not yet used)
-nco/doc/nco.dvi(v) User's Guide: DVI format
-nco/doc/nco.html(v) User's Guide: HTML format
-nco/doc/nco.info(v) User's Guide: Info format
-nco/doc/nco.pdf(v) User's Guide: Portable Document Format
+nco/doc/nco.dvi(v) Users Guide: DVI format
+nco/doc/nco.html(v) Users Guide: HTML format
+nco/doc/nco.info(v) Users Guide: Info format
+nco/doc/nco.pdf(v) Users Guide: Portable Document Format
nco/doc/nco.png Webpage icon (PNG format)
-nco/doc/nco.ps(v) User's Guide: Postscript format
-nco/doc/nco.texi User's Guide: TeXInfo format
+nco/doc/nco.ps(v) Users Guide: Postscript format
+nco/doc/nco.texi Users Guide: TeXInfo format
nco/doc/nco_news.shtml News archive
nco/doc/nco_rfr_crd.pdf(v) Quick Reference Card
nco/doc/netcdf.h.3.6.3 Version 3.6.X of netcdf.h for backward compatibility
diff --git a/doc/README b/doc/README
index 4fc50f5..7eaac65 100644
--- a/doc/README
+++ b/doc/README
@@ -14,7 +14,7 @@ and outputs information, usually a processed netCDF file
Although most users of netCDF and HDF data are involved in scientific
research, these data formats, and thus NCO, are generic and are
equally useful in fields from agriculture to zoology.
-The NCO User's Guide illustrates NCO use with examples from the field
+The NCO Users Guide illustrates NCO use with examples from the field
of climate modeling and analysis.
The NCO homepage is http://nco.sf.net.
diff --git a/doc/TODO b/doc/TODO
index d1d5455..8cdf927 100644
--- a/doc/TODO
+++ b/doc/TODO
@@ -112,7 +112,7 @@ nco148. Put in more useful error diagnostics for Schweitzer's data holes
ncrcat -O -C -d lon,10.0,80.0 -v time_lon in.nc foo.nc;ncks -H foo.nc
nco149. Add ncks hyperslab tests to nco_tst.pl to catch inconsistencies like #148
nco151. Dynamically modify web pages so files with changing version numbers can be directly linked
-nco158. Add policy for hyperslabbing single level values to User's Guide
+nco158. Add policy for hyperslabbing single level values to Users Guide
Multi-file operators with record coordinate
treat single point hyperslabs differently than single file operators
or than any operators with single point cuts on non-record coordinates.
@@ -689,7 +689,10 @@ nco1131. nco: ncra --cb wrong unless single record per input file (see note tagg
ncks --cdl -v climatology_bounds ~/foo2.nc
climatology_bounds are [0.5,1.5] but should be [2.5,6.5]
nco1132. nco: ncclimo fails when input files have different variable orders. Allow user to suppress cb, or make ncra.c re-read cb IDs each file (Guido 20161101)
-nco1133.
+nco1133. ncks CDL-mode do UDUnits time conversions to ISO (added 20161116)
+ ncks -H -v tm_.? ~/nco/data/in.nc
+ ncdump -t ~/nco/data/in.nc | grep tm_
+nco1134.
qrk
************************************************************************
End ncoXXX TODOs
diff --git a/doc/VERSION b/doc/VERSION
index c78c496..83da99b 100644
--- a/doc/VERSION
+++ b/doc/VERSION
@@ -1 +1 @@
-4.6.2
+4.6.3
diff --git a/doc/debian.txt b/doc/debian.txt
index e3eecea..c013314 100644
--- a/doc/debian.txt
+++ b/doc/debian.txt
@@ -29,18 +29,18 @@ apt-get install dh-make debhelper devscripts fakeroot gnupg debian-policy develo
2. Debian build procedure recommends placing entire package source in
subdirectory of main package.
- For starters, we wish to create .debs of tagged releases, e.g., nco-4.6.2
- First we create a clean source distribution of nco and place it in nco-4.6.2
+ For starters, we wish to create .debs of tagged releases, e.g., nco-4.6.3
+ First we create a clean source distribution of nco and place it in nco-4.6.3
Until we know what is necessary, however, we just copy a snapshot
2.1 Clean all build files from development directory
cd ~/nco;make distclean;cd bld;make clean;cd ~
tar cvzf ./nco/nco.tar.gz ./nco/*
-cd ~/nco;tar xvzf nco.tar.gz;mv nco nco-4.6.2
-/bin/rm nco.tar.gz;tar cvzf nco-4.6.2.tar.gz ./nco-4.6.2/*
-cd ~/nco/nco-4.6.2
-dh_make -e zender at uci.edu -f ../nco-4.6.2.tar.gz
+cd ~/nco;tar xvzf nco.tar.gz;mv nco nco-4.6.3
+/bin/rm nco.tar.gz;tar cvzf nco-4.6.3.tar.gz ./nco-4.6.3/*
+cd ~/nco/nco-4.6.3
+dh_make -e zender at uci.edu -f ../nco-4.6.3.tar.gz
2.2 The preceding steps created template debian files for a .deb,
Those files now reside in ~/nco/debian.
@@ -55,7 +55,7 @@ dh_make -e zender at uci.edu -f ../nco-4.6.2.tar.gz
from previous build
cd ~/nco;/bin/rm *.gz
- cd ~/nco/nco-4.6.2
+ cd ~/nco/nco-4.6.3
dpkg-buildpackage -rfakeroot > foo 2>&1
dpkg-buildpackage -rsudo > foo 2>&1
@@ -84,33 +84,33 @@ patch -p0 < nco_X.Y.Z-3.diff # Patch destination with Debian diff
make tags
# Put cute version-specific string in nco_ctl.c:nco_nmn_get()
# Install correct version numbers before updating Debian
-# tags-query replace 4.6.2 with X.Y.Z+1
+# tags-query replace 4.6.3 with X.Y.Z+1
# If tags-query replace does not work, be sure to manually change
# versions in configure.ac, debian/files, doc/ANNOUNCE, doc/debian.txt,
# doc/index.shtml, doc/nco.texi, bld/nco_dst.pl, doc/VERSION
# 20141201: Change NCO_VERSION_PATCH in src/nco.h!!!!!!!!!!!!!!!!!!!!!!
- cd ~/nco/debian;dch -b --force-distribution --distribution=unstable -v 4.6.2-1 # Update changelog (-b forces this version number)
+ cd ~/nco/debian;dch -b --force-distribution --distribution=unstable -v 4.6.3-1 # Update changelog (-b forces this version number)
emacs ~/nco/bld/nco.spec # Update changelog
# For unknown reason rules file may lose its executable bit
chmod a+x ~/nco/debian/rules
# Rebuild autotools so new version # propagates
cd ~/nco;aclocal;autoheader;automake --foreign;autoconf
# Save all files in emacs before tagging
- ${HOME}/nco/bld/nco_dst.pl --dbg=2 --bld --cln nco-4.6.2
+ ${HOME}/nco/bld/nco_dst.pl --dbg=2 --bld --cln nco-4.6.3
# Upload tarball to SF https://sourceforge.net/projects/nco/files
- cd;scp dust.ess.uci.edu:/var/www/html/nco/src/nco-4.6.2.tar.gz .
+ cd;scp dust.ess.uci.edu:/var/www/html/nco/src/nco-4.6.3.tar.gz .
7. Ubuntu PPA
https://help.launchpad.net/Packaging/PPA
-dput NCO nco_4.6.2-2~ppa1_source.changes
+dput NCO nco_4.6.3-2~ppa1_source.changes
sudo apt-key adv --recv-keys --keyserver keyserver.ubuntu.com
# Location of build diagnostics for mentors to help
-http://dust.ess.uci.edu/nco/src/nco_4.6.2-1.dpkg-buildpackage.txt
-http://dust.ess.uci.edu/nco/src/nco_4.6.2-1.dsc
-http://dust.ess.uci.edu/nco/src/nco_4.6.2-1_i386.changes
-http://dust.ess.uci.edu/nco/src/nco_4.6.2-1_i386.deb
-http://dust.ess.uci.edu/nco/src/nco_4.6.2.orig.tar.gz
+http://dust.ess.uci.edu/nco/src/nco_4.6.3-1.dpkg-buildpackage.txt
+http://dust.ess.uci.edu/nco/src/nco_4.6.3-1.dsc
+http://dust.ess.uci.edu/nco/src/nco_4.6.3-1_i386.changes
+http://dust.ess.uci.edu/nco/src/nco_4.6.3-1_i386.deb
+http://dust.ess.uci.edu/nco/src/nco_4.6.3.orig.tar.gz
# Becoming a Debian developer
http://www.debian.org/devel/join/newmaint
@@ -164,31 +164,31 @@ Matej Vela <vela at debian.org>, Daniel Baumann <daniel at debian.org>, Warren Turkal
# export LD_LIBRARY_PATH=/usr/lib:/lib:/usr/X11R6/lib
# sudo aptitude install antlr bison flex gsl-bin libgsl0-dev libantlr-dev netcdf-bin libnetcdfc7 libnetcdf-dev texinfo libcurl4-gnutls-dev libexpat1-dev libxml2-dev udunits-bin libudunits2-0 libudunits2-dev
cd ~/nco;cvc
-sudo /bin/rm -rf ${DATA}/nco-4.6.2 ${DATA}/nco_4.6.2* ${DATA}/debian # Cleanup last build. sudo necessary for removal because dpkg-buildpackage uses sudo?
-# cd ${DATA};cvs -d zender at nco.cvs.sf.net:/cvsroot/nco export -kkv -r nco-4.6.2-1 -d nco-4.6.2 nco # Export based on tag
-cd ${DATA};cvs -d zender at nco.cvs.sf.net:/cvsroot/nco export -kkv -D "1 second ago" -d nco-4.6.2 nco # Export most recent
-tar cvzf ./nco_4.6.2.orig.tar.gz --exclude='nco-4.6.2/debian*' --exclude='.cvsignore' --exclude='ncap_lex.c' --exclude='ncap_yacc.[ch]' ./nco-4.6.2
-/bin/rm -rf ${DATA}/nco-4.6.2 # Remove cvs-exported directory
-tar xvzf ./nco_4.6.2.orig.tar.gz # Untar to get directory without excluded files
-mkdir -p ${DATA}/nco-4.6.2/debian/source;cd ~/nco/debian;/bin/cp changelog compat control convert copyright doc-base files info rules ${DATA}/nco-4.6.2/debian;cd ~/nco/debian/source;/bin/cp format ${DATA}/nco-4.6.2/debian/source # Replace debian directory with _CURRENT_ (main trunk) settings
+sudo /bin/rm -rf ${DATA}/nco-4.6.3 ${DATA}/nco_4.6.3* ${DATA}/debian # Cleanup last build. sudo necessary for removal because dpkg-buildpackage uses sudo?
+# cd ${DATA};cvs -d zender at nco.cvs.sf.net:/cvsroot/nco export -kkv -r nco-4.6.3-1 -d nco-4.6.3 nco # Export based on tag
+cd ${DATA};cvs -d zender at nco.cvs.sf.net:/cvsroot/nco export -kkv -D "1 second ago" -d nco-4.6.3 nco # Export most recent
+tar cvzf ./nco_4.6.3.orig.tar.gz --exclude='nco-4.6.3/debian*' --exclude='.cvsignore' --exclude='ncap_lex.c' --exclude='ncap_yacc.[ch]' ./nco-4.6.3
+/bin/rm -rf ${DATA}/nco-4.6.3 # Remove cvs-exported directory
+tar xvzf ./nco_4.6.3.orig.tar.gz # Untar to get directory without excluded files
+mkdir -p ${DATA}/nco-4.6.3/debian/source;cd ~/nco/debian;/bin/cp changelog compat control convert copyright doc-base files info rules ${DATA}/nco-4.6.3/debian;cd ~/nco/debian/source;/bin/cp format ${DATA}/nco-4.6.3/debian/source # Replace debian directory with _CURRENT_ (main trunk) settings
#export DEB_BUILD_OPTIONS='disable-dap-netcdf disable-netcdf4 disable-udunits2'; # Disable optional packages based on available Debian support
-#cd ${DATA}/nco-4.6.2;dpkg-buildpackage -rsudo -uc -us > ~/foo.nco 2>&1 # -uc -us: Do not sign changes or source files
-#cd ${DATA}/nco-4.6.2;dpkg-buildpackage -rsudo -sa > ~/foo.nco 2>&1 # -sa: Include _orig.tar.gz in .changes
-cd ${DATA}/nco-4.6.2;dpkg-buildpackage -rsudo > ~/foo.nco 2>&1
+#cd ${DATA}/nco-4.6.3;dpkg-buildpackage -rsudo -uc -us > ~/foo.nco 2>&1 # -uc -us: Do not sign changes or source files
+#cd ${DATA}/nco-4.6.3;dpkg-buildpackage -rsudo -sa > ~/foo.nco 2>&1 # -sa: Include _orig.tar.gz in .changes
+cd ${DATA}/nco-4.6.3;dpkg-buildpackage -rsudo > ~/foo.nco 2>&1
sudo dpkg --remove nco
-sudo dpkg --install ${DATA}/nco_4.6.2-1_*.deb
+sudo dpkg --install ${DATA}/nco_4.6.3-1_*.deb
cd ~/nco/bld;MY_BIN_DIR=/usr/bin ../bm/nco_bm.pl --regress
# http://lintian.debian.org/full/zender@uci.edu.html
-lintian ${DATA}/nco_4.6.2-1_*.deb
-ls -l ${DATA}/nco_4.6.2*
+lintian ${DATA}/nco_4.6.3-1_*.deb
+ls -l ${DATA}/nco_4.6.3*
m ~/foo.nco
# Upload Ubuntu (rather than Debian) packages to websites
-scp ${DATA}/nco_4.6.2* dust.ess.uci.edu:/var/www/html/nco/src
-scp ${DATA}/nco_4.6.2* zender,nco at web.sf.net:/home/project-web/nco/htdocs/src
+scp ${DATA}/nco_4.6.3* dust.ess.uci.edu:/var/www/html/nco/src
+scp ${DATA}/nco_4.6.3* zender,nco at web.sf.net:/home/project-web/nco/htdocs/src
# NB: Make sure RPMs build before uploading to debian, since changing
# Debian versions is a PITA
# NB: Only upload pbuilder Debian Sid (not personal Ubuntu) .deb builds to Debian mentors
-# cd ${DATA};dupload -t mentors nco_4.6.2-1_*.changes
+# cd ${DATA};dupload -t mentors nco_4.6.3-1_*.changes
bsrc # Reset shell environment for regular development
# New build system #2
@@ -202,52 +202,52 @@ DIST=sid sudo pbuilder update # Update chroot before building package in it
# dget http://ftp.debian.org/debian/pool/main/n/nco/nco_3.9.0-1.dsc
# dget http://ftp.debian.org/debian/pool/main/n/netcdf/netcdf_3.6.1-1.dsc
# apt-get source nco # Get package source
-sudo /bin/rm /var/cache/pbuilder/result/nco_4.6.2* # Cleanup prior build
+sudo /bin/rm /var/cache/pbuilder/result/nco_4.6.3* # Cleanup prior build
# To pass DEB_BUILD_OPTIONS to pbuilder while using sudo, one must first
# modify sudoers with visudo to prevent sudo from resetting environment
#export DEB_BUILD_OPTIONS='disable-dap-netcdf disable-netcdf4 disable-udunits2'; # Disable optional packages based on available Debian support
-cd ${DATA};DIST=sid sudo pbuilder build nco_4.6.2-1.dsc > ~/foo.nco.pbuilder 2>&1
-cd /var/cache/pbuilder/result;debsign -k6F635D10 nco_4.6.2-1_*.changes
-lintian /var/cache/pbuilder/result/nco_4.6.2-1_*.deb
+cd ${DATA};DIST=sid sudo pbuilder build nco_4.6.3-1.dsc > ~/foo.nco.pbuilder 2>&1
+cd /var/cache/pbuilder/result;debsign -k6F635D10 nco_4.6.3-1_*.changes
+lintian /var/cache/pbuilder/result/nco_4.6.3-1_*.deb
sudo dpkg --remove nco
-sudo dpkg --install /var/cache/pbuilder/result/nco_4.6.2-1_*.deb
+sudo dpkg --install /var/cache/pbuilder/result/nco_4.6.3-1_*.deb
cd ~/nco/bld;MY_BIN_DIR=/usr/bin ../bm/nco_bm.pl --regress
# NB: Upload pbuilder Debian Sid packages to Debian mentors, but not
# to personal or NCO websites since most people use Ubuntu not Debian
# NB: Debian versions are a PITA, ensure RPMs build before uploading to Debian
-cd /var/cache/pbuilder/result;dupload -t mentors nco_4.6.2-1_*.changes
+cd /var/cache/pbuilder/result;dupload -t mentors nco_4.6.3-1_*.changes
# RPM builds as root
export rpm_root='/usr/src/redhat'
# export sudo_sng='' # sudo not-necessary when builing in user directories
export sudo_sng='sudo' # sudo necessary when building in system directories
cd ~/nco;cvc;cvu
-/bin/rm -rf ${DATA}/nco-4.6.2 ${DATA}/nco-4.6.2* # Cleanup last build
+/bin/rm -rf ${DATA}/nco-4.6.3 ${DATA}/nco-4.6.3* # Cleanup last build
${sudo_sng} /bin/rm -r -f \
-${rpm_root}/BUILD/nco-4.6.2 \
-${rpm_root}/RPMS/i386/nco-4.6.2-?.i386.rpm \
-${rpm_root}/RPMS/i386/nco-debuginfo-4.6.2-?.i386.rpm \
-${rpm_root}/RPMS/i386/nco-devel-4.6.2-?.i386.rpm \
-${rpm_root}/SOURCES/nco-4.6.2.tar.gz \
-${rpm_root}/SPECS/nco-4.6.2.spec \
-${rpm_root}/SRPMS/nco-4.6.2-?.src.rpm
-cd ${DATA};cvs -d zender at nco.cvs.sf.net:/cvsroot/nco export -kkv -r nco-4.6.2-1 -d nco-4.6.2 nco # Export based on tag
-${sudo_sng} ln -s ${HOME}/nco/bld/nco.spec ${rpm_root}/SPECS/nco-4.6.2.spec
-tar cvzf ./nco-4.6.2.tar.gz --exclude='nco-4.6.2/debian*' --exclude='.cvsignore' --exclude='ncap_lex.c' --exclude='ncap_yacc.[ch]' ./nco-4.6.2
-${sudo_sng} /bin/cp ${DATA}/nco-4.6.2.tar.gz ${rpm_root}/SOURCES
+${rpm_root}/BUILD/nco-4.6.3 \
+${rpm_root}/RPMS/i386/nco-4.6.3-?.i386.rpm \
+${rpm_root}/RPMS/i386/nco-debuginfo-4.6.3-?.i386.rpm \
+${rpm_root}/RPMS/i386/nco-devel-4.6.3-?.i386.rpm \
+${rpm_root}/SOURCES/nco-4.6.3.tar.gz \
+${rpm_root}/SPECS/nco-4.6.3.spec \
+${rpm_root}/SRPMS/nco-4.6.3-?.src.rpm
+cd ${DATA};cvs -d zender at nco.cvs.sf.net:/cvsroot/nco export -kkv -r nco-4.6.3-1 -d nco-4.6.3 nco # Export based on tag
+${sudo_sng} ln -s ${HOME}/nco/bld/nco.spec ${rpm_root}/SPECS/nco-4.6.3.spec
+tar cvzf ./nco-4.6.3.tar.gz --exclude='nco-4.6.3/debian*' --exclude='.cvsignore' --exclude='ncap_lex.c' --exclude='ncap_yacc.[ch]' ./nco-4.6.3
+${sudo_sng} /bin/cp ${DATA}/nco-4.6.3.tar.gz ${rpm_root}/SOURCES
cd ${rpm_root}/SPECS
-${sudo_sng} rpmbuild -ba --sign nco-4.6.2.spec > ~/foo.nco 2>&1
+${sudo_sng} rpmbuild -ba --sign nco-4.6.3.spec > ~/foo.nco 2>&1
scp \
-${rpm_root}/RPMS/i386/nco-4.6.2-?.i386.rpm \
-${rpm_root}/RPMS/i386/nco-debuginfo-4.6.2-?.i386.rpm \
-${rpm_root}/RPMS/i386/nco-devel-4.6.2-?.i386.rpm \
-${rpm_root}/SRPMS/nco-4.6.2-?.src.rpm \
+${rpm_root}/RPMS/i386/nco-4.6.3-?.i386.rpm \
+${rpm_root}/RPMS/i386/nco-debuginfo-4.6.3-?.i386.rpm \
+${rpm_root}/RPMS/i386/nco-devel-4.6.3-?.i386.rpm \
+${rpm_root}/SRPMS/nco-4.6.3-?.src.rpm \
dust.ess.uci.edu:/var/www/html/nco/src
scp \
-${rpm_root}/RPMS/i386/nco-4.6.2-?.i386.rpm \
-${rpm_root}/RPMS/i386/nco-debuginfo-4.6.2-?.i386.rpm \
-${rpm_root}/RPMS/i386/nco-devel-4.6.2-?.i386.rpm \
-${rpm_root}/SRPMS/nco-4.6.2-?.src.rpm \
+${rpm_root}/RPMS/i386/nco-4.6.3-?.i386.rpm \
+${rpm_root}/RPMS/i386/nco-debuginfo-4.6.3-?.i386.rpm \
+${rpm_root}/RPMS/i386/nco-devel-4.6.3-?.i386.rpm \
+${rpm_root}/SRPMS/nco-4.6.3-?.src.rpm \
zender,nco at web.sf.net:/home/project-web/nco/htdocs/src
# RPM builds as user
@@ -256,33 +256,33 @@ zender,nco at web.sf.net:/home/project-web/nco/htdocs/src
export rpm_root="${DATA}/rpm/nco"
#cd ~/nco;cvc;cvu # This risks committing unwanted *.[ch]pp files
mkdir -p ${DATA}/rpm/nco/TMP ${DATA}/rpm/nco/BUILD
-/bin/rm -rf ${DATA}/nco-4.6.2 ${DATA}/nco-4.6.2* # Cleanup last build
+/bin/rm -rf ${DATA}/nco-4.6.3 ${DATA}/nco-4.6.3* # Cleanup last build
/bin/rm -r -f \
-${rpm_root}/nco-4.6.2-?.src.rpm \
-${rpm_root}/nco-4.6.2.spec \
-${rpm_root}/nco-4.6.2.tar.gz \
-${rpm_root}/*/nco-4.6.2-?.*.rpm \
-${rpm_root}/*/nco-debuginfo-4.6.2-?.*.rpm \
-${rpm_root}/*/nco-devel-4.6.2-?.*.rpm
-# cd ${DATA};cvs -d zender at nco.cvs.sf.net:/cvsroot/nco export -kkv -r nco-4.6.2-1 -d nco-4.6.2 nco # Export based on tag
-cd ${DATA};cvs -d zender at nco.cvs.sf.net:/cvsroot/nco export -kkv -D "1 second ago" -dnco-4.6.2 nco # Export most recent and build as 4.6.2-1
-tar cvzf ./nco-4.6.2.tar.gz --exclude='nco-4.6.2/debian*' --exclude='.cvsignore' --exclude='ncap_lex.c' --exclude='ncap_yacc.[ch]' ./nco-4.6.2
-/bin/cp ${DATA}/nco-4.6.2.tar.gz ${rpm_root}
+${rpm_root}/nco-4.6.3-?.src.rpm \
+${rpm_root}/nco-4.6.3.spec \
+${rpm_root}/nco-4.6.3.tar.gz \
+${rpm_root}/*/nco-4.6.3-?.*.rpm \
+${rpm_root}/*/nco-debuginfo-4.6.3-?.*.rpm \
+${rpm_root}/*/nco-devel-4.6.3-?.*.rpm
+# cd ${DATA};cvs -d zender at nco.cvs.sf.net:/cvsroot/nco export -kkv -r nco-4.6.3-1 -d nco-4.6.3 nco # Export based on tag
+cd ${DATA};cvs -d zender at nco.cvs.sf.net:/cvsroot/nco export -kkv -D "1 second ago" -dnco-4.6.3 nco # Export most recent and build as 4.6.3-1
+tar cvzf ./nco-4.6.3.tar.gz --exclude='nco-4.6.3/debian*' --exclude='.cvsignore' --exclude='ncap_lex.c' --exclude='ncap_yacc.[ch]' ./nco-4.6.3
+/bin/cp ${DATA}/nco-4.6.3.tar.gz ${rpm_root}
ln -s ${HOME}/nco/bld/nco.spec ${rpm_root}/nco.spec
cd ${rpm_root}
rpmbuild -ba --sign nco.spec > ~/foo.nco 2>&1
-rpmlint ${rpm_root}/*/nco-4.6.2-?.*.rpm
+rpmlint ${rpm_root}/*/nco-4.6.3-?.*.rpm
sudo yum remove nco
-sudo yum install ${rpm_root}/*/nco-4.6.2-?.*.rpm
+sudo yum install ${rpm_root}/*/nco-4.6.3-?.*.rpm
scp \
-${rpm_root}/*/nco-4.6.2-?.*.rpm \
-${rpm_root}/*/nco-debuginfo-4.6.2-?.*.rpm \
-${rpm_root}/*/nco-devel-4.6.2-?.*.rpm \
-${rpm_root}/nco-4.6.2-?.*.src.rpm \
+${rpm_root}/*/nco-4.6.3-?.*.rpm \
+${rpm_root}/*/nco-debuginfo-4.6.3-?.*.rpm \
+${rpm_root}/*/nco-devel-4.6.3-?.*.rpm \
+${rpm_root}/nco-4.6.3-?.*.src.rpm \
dust.ess.uci.edu:/var/www/html/nco/src
scp \
-${rpm_root}/*/nco-4.6.2-?.*.rpm \
-${rpm_root}/*/nco-debuginfo-4.6.2-?.*.rpm \
-${rpm_root}/*/nco-devel-4.6.2-?.*.rpm \
-${rpm_root}/nco-4.6.2-?.*.src.rpm \
+${rpm_root}/*/nco-4.6.3-?.*.rpm \
+${rpm_root}/*/nco-debuginfo-4.6.3-?.*.rpm \
+${rpm_root}/*/nco-devel-4.6.3-?.*.rpm \
+${rpm_root}/nco-4.6.3-?.*.src.rpm \
zender,nco at web.sf.net:/home/project-web/nco/htdocs/src
diff --git a/doc/highlights_old.shtml b/doc/highlights_old.shtml
index 939a692..074ffd5 100644
--- a/doc/highlights_old.shtml
+++ b/doc/highlights_old.shtml
@@ -242,7 +242,7 @@ Improve IRIX 6.5 support</li>
<li><b>NCO 3.2.0</b>: More forgiving exclusion list (<tt>-x -v <i>var_nm</i></tt>) <a href="http://nco.sf.net/nco.html#xcl">handling</a>; Fix <tt>rmssdn</tt> normalization; Support Mac OS X on Intel; Documentation bugfixes</li>
<li><b>NCO 3.1.9</b>: <tt>ncap2</tt> in RPM packages; Quieter output; AIX <tt>configure</tt> supports <tt>ncap2</tt>; Update to DAP for netCDF 3.6.2; Fix <tt>nc[erw]a</tt> for coordinate min/max/ttl; <tt>ncecat</tt> allows files to differ in record dimension size</li>
<li><b>NCO 3.1.8</b>: Support <tt>_FillValue</tt> with compile-time switch; Debian package synchronized, includes <tt>ncap2</tt></li>
-<li><b>NCO 3.1.7</b>: <tt>ncap2</tt> “double-parsing”, array initialization, supercedes <tt>ncap</tt></li>
+<li><b>NCO 3.1.7</b>: <tt>ncap2</tt> “double-parsing”, array initialization, supersedes <tt>ncap</tt></li>
<li><b>NCO 3.1.6</b>: Support <a href="http://nco.sf.net/nco.html#srd">stride</a> in all hyperslabbing operators; change more WARNINGs to INFOs</li>
<li><b>NCO 3.1.5</b>: New <tt>ncap2</tt> array and hyperslab features; change some WARNINGs to INFOs, add Pathscale and update PGI and Intel compiler support</li>
<li><b>NCO 3.1.4</b>: Fix <tt>ncbo</tt> memory problem; report timer results</li>
diff --git a/doc/index.shtml b/doc/index.shtml
index 5b821d0..1884f60 100644
--- a/doc/index.shtml
+++ b/doc/index.shtml
@@ -70,7 +70,7 @@ Try to disable Spammers' machines:
<p><h1 align="center">Bienvenue sur le netCDF Operator (NCO) site</h1>
<p><h2>
-Current stable NCO version is 4.6.1 released <!--#flastmod file="src/nco-4.6.1.tar.gz"-->
+Current stable NCO version is 4.6.3 released <!--#flastmod file="src/nco-4.6.3.tar.gz"-->
</h2>
<table border=0 width=100%>
@@ -149,7 +149,8 @@ and
<h2>Recent Releases & Milestones</h2>
<ul>
-<li>2016 Dec 16: 4.6.3 (<i>In progress</i>)
+<li>2017 Jan ??: 4.6.4 (<i>In progress</i>)
+<li>2016 Dec 23: 4.6.3 (Minor features)
<li>2016 Nov 16: 4.6.2 (JSON)
<li>2016 Sep 19: <i>Geosci. Model Dev.</i> publishes PPC <a href="http://www.geosci-model-dev.net/9/3199/2016">paper</a></li>
<li>2016 Aug 06: 4.6.1 (Stability)
@@ -617,18 +618,23 @@ Identical copies of those tarballs are also stored
<a href="http://nco.sf.net/src">here</a> on SourceForge for historical
continuity.
You may retrieve the source of tagged versions directly with, e.g.,
-<tt>git clone -b 4.6.1 http://github.com/nco/nco.git nco-4.6.1</tt></a>.
+<tt>git clone -b 4.6.3 http://github.com/nco/nco.git nco-4.6.3</tt></a>.
<ul>
-<li><b>NCO 4.6.4</b>: (<i>Future</i>)
+<li><b>NCO 4.6.5</b>: (<i>Future</i>)
<tt>ncks</tt> prints human-legible ISO8601 dates;
<a href="http://nco.sf.net/nco.html#cnk">Chunking</a> bytes not elements, caching;
extensive hashing?;
netCDF4 compound types?;
Optimize diskless files?;</li>
-<li><b>NCO 4.6.3</b>: (<i>In Progress, features in-progress or completed include</i>)
-CMake build option;</li>
-<tt>ncks --json</tt> prints strided brackets for multi-dimensional arrays;
+<li><b>NCO 4.6.4</b>: (<i>In Progress, features in-progress or completed include</i>)
<tt>ncks --xtn</tt> better extensive variable treatment;</li>
+<li><b>NCO 4.6.3</b>: (<i>Current Stable Release</i>)
+CMake build option;
+<tt>ncap2</tt> udunits() function;
+<tt>ncclimo</tt> supports binary climos, annual mode;
+<tt>ncclimo, ncremap</tt> support long-options;
+<tt>ncks --cdl</tt> attribute types as comments;
+<tt>ncks --json</tt> strided brackets for multi-dimensional arrays;</li>
<li><b>NCO 4.6.2</b>: (<i>Current Stable Release</i>)
Improved <tt>ncclimo, ncremap</tt> behavior in <tt>module</tt> environments;
<tt>ncks --json</tt> for JSON output;
@@ -771,7 +777,7 @@ goodies besides NCO) to your automatically-searched channels with
‘<tt>conda config --add channels conda-forge</tt>’, then
install NCO with ‘<tt>conda install nco</tt>’.
The default NCO installed by <tt>conda</tt> is generally within a month of the latest release.
-<li><a href="https://github.com/conda-forge/nco-feedstock">nco-4.6.1</a> Executables Anaconda-compatible. Maintained by Filipe Fernandes.</li>
+<li><a href="https://github.com/conda-forge/nco-feedstock">nco-4.6.3</a> Executables Anaconda-compatible. Maintained by Filipe Fernandes.</li>
Thanks to Rich Signell, Filipe Fernandes (and others?) for developing and maintaining the NCO package for conda.
</ul>
@@ -780,7 +786,7 @@ Thanks to Rich Signell, Filipe Fernandes (and others?) for developing and mainta
<h3><a href="http://www.debian.org">Debian</a> and <a href="http://www.ubuntu.com">Ubuntu</a> GNU/Linux</a></h3>
<ul>
<!--
-<li><a href="http://www.debian.org">Debian</a> and <a href="http://www.ubuntu.com">Ubuntu</a> GNU/Linux-compatible Intel systems, prebuilt binary executable <a href="http://www.debian.org">deb</a>: <a href="http://packages.debian.org/testing/math/nco.html">nco-4.6.1</a></li>
+<li><a href="http://www.debian.org">Debian</a> and <a href="http://www.ubuntu.com">Ubuntu</a> GNU/Linux-compatible Intel systems, prebuilt binary executable <a href="http://www.debian.org">deb</a>: <a href="http://packages.debian.org/testing/math/nco.html">nco-4.6.3</a></li>
-->
<a href="http://packages.debian.org/unstable/science/nco">Debian NCO</a> and
<a href="https://launchpad.net/ubuntu/+source/nco">Ubuntu NCO</a> homepages.
@@ -788,8 +794,8 @@ Thanks to Rich Signell, Filipe Fernandes (and others?) for developing and mainta
NCO packages in the Debian/Ubuntu repositories (e.g., Sid and Raring) generally lag the packages distributed here by 6–12 months.
<a name="beta"></a><a name="prerelease"> <!-- http://nco.sf.net#beta -->
Newer (beta- or pre-release) packages are often available for intrepid Debian/Ubuntu users as described <a href="https://github.com/nco/nco/tree/master/doc/beta.txt">here</a>.
-<dt>Debian package for most recent NCO release (install with, e.g., ‘<tt>dpkg --install nco_4.6.1-1_i386.deb</tt>’):</dt>
-<li><a href="https://launchpad.net/ubuntu/+source/nco/4.6.1-1">nco_4.6.1-1_amd64.deb</a> : Executables AMD64-compatible</li>
+<dt>Debian package for most recent NCO release (install with, e.g., ‘<tt>dpkg --install nco_4.6.3-1_i386.deb</tt>’):</dt>
+<li><a href="https://launchpad.net/ubuntu/+source/nco/4.6.3-1">nco_4.6.3-1_amd64.deb</a> : Executables AMD64-compatible</li>
Thanks to Daniel Baumann, Sebastian Couwenberg, Barry deFreese, Francesco Lovergine,
Brian Mays, Rorik Peterson, and Matej Vela for their help packaging
NCO for Debian over the years.
@@ -801,14 +807,11 @@ NCO for Debian over the years.
<h3><a href="http://fedora.redhat.com">Fedora</a>, RedHat Enterprise Linux (<a href="http://www.redhat.com/rhel">RHEL</a>), and Community ENTerprise Operating System (<a href="http://www.centos.org">CentOS</a>) GNU/Linux</h3>
<ul>
<dt>The <a href="https://admin.fedoraproject.org/pkgdb/package/nco">Fedora NCO</a> RPMs are usually up-to-date so that ‘<tt>dnf install nco</tt>’ will install a recent version.
+RHEL NCO RPMs are documented at the Fedora site.
+OpenSUSE keeps its NCO RPMs <a href="http://software.opensuse.org/package/nco">here</a>.
A comprehensive list of pre-built RPMs for many OS's is <a href="http://rpmfind.net/linux/rpm2html/search.php?query=nco">here</a>.
-<li><a href="ftp://rpmfind.net/linux/fedora/linux/development/rawhide/x86_64/os/Packages/n/nco-4.3.2-1.fc20.x86_64.rpm">nco-4.3.2-1.fc20.x86_64.rpm</a>: Executables for x86_64/Fedora Core 20-compatible environments</li></dt>
-<dt>If not, try our own most recent (we stopped building RPMs many years ago and are looking for a volunteer to do this instead) self-built NCO RPMs (install with, e.g., ‘<tt>dnf install nco-3.9.5-1.fc7.i386.rpm</tt>’):
-<li><a href="src/nco-3.9.5-1.fc7.i386.rpm">nco-3.9.5-1.fc7.i386.rpm</a> (<!--#fsize file="src/nco-3.9.5-1.fc7.i386.rpm"-->): Executables for i386/Fedora Core 7-compatible environments (last updated <!--#flastmod file="src/nco-3.9.5-1.fc7.i386.rpm"-->)</li>
-<li><a href="src/nco-3.9.5-1.fc7.src.rpm">nco-3.9.5-1.fc7.src.rpm</a> (<!--#fsize file="src/nco-3.9.5-1.fc7.src.rpm"-->): Source (last updated <!--#flastmod file="src/nco-3.9.5-1.fc7.src.rpm"-->)</li>
-<li><a href="src/nco-3.9.5-1.x86_64.rpm">nco-3.9.5-1.x86_64.rpm</a> (<!--#fsize file="src/nco-3.9.5-1.x86_64.rpm"-->): Executables for x86_64/CentOS 5-compatible environments (last updated <!--#flastmod file="src/nco-3.9.5-1.x86_64.rpm"-->)</li>
Volunteers have updated and maintained fairly up-to-date NCO packages in Fedora since it was added by Ed Hill in about 2004.
-Thanks to Patrice Dumas, Ed Hill, and Orion Poplawski for packaging NCO RPMs over the years.
+Thanks to Patrice Dumas, Ed Hill, Orion Poplawski, and Manfred Schwarb for packaging NCO RPMs over the years.
Thanks to Gavin Burris and Kyle Wilcox for documenting build procedures for RHEL and CentOS.
</ul>
@@ -834,12 +837,12 @@ Thanks to Gavin Burris and Kyle Wilcox for documenting build procedures for RHEL
<h3><a href="http://www.apple.com/macosx/">Mac OS X/Darwin</a></h3>
<ul>
<!--
-# Mac OS X 10.11 (El Capitan) systems (aerosol):
+# Mac OS X 10.12 (Sierra) systems (aerosol):
/usr/bin/scp ~/nco/doc/index.shtml zender,nco at web.sf.net:/home/project-web/nco/htdocs
-cd ~/bin;tar cvzf ${DATA}/nco-4.6.1.macosx.10.11.tar.gz nc*;scp ${DATA}/nco-4.6.1.macosx.10.11.tar.gz zender,nco at web.sf.net:/home/project-web/nco/htdocs/src
+cd ~/bin;tar cvzf ${DATA}/nco-4.6.3.macosx.10.12.tar.gz nc*;scp ${DATA}/nco-4.6.3.macosx.10.12.tar.gz zender,nco at web.sf.net:/home/project-web/nco/htdocs/src
-->
The most up-to-date executables are probably those in the tarball below. Those unfamiliar with installing executables from tarballs may try the (older) <a href="http://en.wikipedia.org/wiki/Apple_Disk_Image">DMG</a> files (you may need to add <tt>/opt/local/bin</tt> to your executable path to access those operators).
-<li><a href="src/nco-4.6.1.macosx.10.11.tar.gz">nco-4.6.1.macosx.10.11.tar.gz</a> (<!--#fsize file="src/nco-4.6.1.macosx.10.11.tar.gz"-->): Executables MacOSX 10.11-compatible (last updated <!--#flastmod file="src/nco-4.6.1.macosx.10.11.tar.gz"-->).
+<li><a href="src/nco-4.6.3.macosx.10.12.tar.gz">nco-4.6.3.macosx.10.12.tar.gz</a> (<!--#fsize file="src/nco-4.6.3.macosx.10.12.tar.gz"-->): Executables MacOSX 10.12-compatible (last updated <!--#flastmod file="src/nco-4.6.3.macosx.10.12.tar.gz"-->).
(NB: These executables require
the <a href="http://trac.macosforge.org/projects/macports/wiki">MacPorts</a>
<a href="#bld_macports">dependencies</a> for <a href="http://svn.macports.org/repository/macports/trunk/dports/science/nco">NCO</a>). Maintained by NCO Project.</li>
@@ -861,7 +864,7 @@ To build NCO from source yourself using MSVC or Qt, please see the <a href="nco_
<ul>
<!-- Copy files from http://glace.ess.uci.edu
/usr/bin/scp /home/pvicente/nco/doc/index.shtml pvicente,nco at web.sf.net:/home/project-web/nco/htdocs
-/usr/bin/scp /home/pvicente/windows_setup/nco-4.6.2.windows.mvs.exe pvicente,nco at web.sf.net:/home/project-web/nco/htdocs/src
+/usr/bin/scp /home/pvicente/windows_setup/nco-4.6.3.windows.mvs.exe pvicente,nco at web.sf.net:/home/project-web/nco/htdocs/src
/usr/bin/scp /home/pvicente/windows_setup/nco-4.4.5.win32.cygwin.tar.gz pvicente,nco at web.sf.net:/home/project-web/nco/htdocs/src
-->
<li><a href="src/nco-4.6.2.windows.mvs.exe">nco-4.6.2.windows.mvs.exe</a> (<!--#fsize file="src/nco-4.6.2.windows.mvs.exe"-->) : Windows Self-Extracting Installer (last updated <!--#flastmod file="src/nco-4.6.2.windows.mvs.exe"-->). Maintained by Pedro Vicente.</li>
@@ -889,8 +892,8 @@ Thanks to Cygnus Solutions and RedHat Inc. for developing and supporting Cygwin
<!-- http://nco.sf.net#RTFM -->
<dt><a name="RTFM"></a></dt>
<dt><a name="rtfm"></a></dt>
-<h2>Documentation and User's Guide</h2>
-<p>The NCO User's Guide is available for reading in these formats:
+<h2>Documentation and Users Guide</h2>
+<p>The NCO Users Guide is available for reading in these formats:
<ul>
<li><a href="./nco.dvi">DVI</a> Device Independent (<tt>kdvi</tt>, <tt>xdvi</tt>)</li>
<li><a href="./nco.html">HTML</a> Hypertext (any browser)</li>
@@ -1035,11 +1038,11 @@ site.</li>
The simplest way to acquire the source is to download the compressed tarball:
<ul>
<li>
-<!-- scp ${DATA}/nco-4.6.1.tar.gz zender,nco at web.sf.net:/home/project-web/nco/htdocs/src -->
-<a href="https://github.com/nco/nco/archive/4.6.1.tar.gz">nco-4.6.1.tar.gz</a>
-(<!--#fsize file="src/nco-4.6.1.tar.gz"--> compressed tar-file)<br>
-<!--#exec cmd="openssl dgst -md5 src/nco-4.6.1.tar.gz"--><br>
-<!--#exec cmd="openssl dgst -sha1 src/nco-4.6.1.tar.gz"-->
+<!-- scp ${DATA}/nco-4.6.3.tar.gz zender,nco at web.sf.net:/home/project-web/nco/htdocs/src -->
+<a href="https://github.com/nco/nco/archive/4.6.3.tar.gz">nco-4.6.3.tar.gz</a>
+(<!--#fsize file="src/nco-4.6.3.tar.gz"--> compressed tar-file)<br>
+<!--#exec cmd="openssl dgst -md5 src/nco-4.6.3.tar.gz"--><br>
+<!--#exec cmd="openssl dgst -sha1 src/nco-4.6.3.tar.gz"-->
</li>
</ul>
@@ -1058,8 +1061,8 @@ You may retrieve any NCO distribution you wish from
<a href="https://help.github.com">GitHub</a>.
Usually you wish to retrieve a recent tagged (i.e., released) version.
This command retrieves the entire NCO repository (< 20 MB) and
-then checks out NCO version <tt>4.6.1</tt>:
-<p><tt>git clone https://github.com/nco/nco.git;cd nco;git checkout 4.6.1</tt></p>
+then checks out NCO version <tt>4.6.3</tt>:
+<p><tt>git clone https://github.com/nco/nco.git;cd nco;git checkout 4.6.3</tt></p>
These commands retrieve the current (“bleeding edge”)
development version of NCO into a local directory named <tt>nco</tt>:
<p><tt>git clone https://github.com/nco/nco.git ~/nco</tt></p> or
@@ -1067,9 +1070,9 @@ development version of NCO into a local directory named <tt>nco</tt>:
Track changes to the development version using
<p><tt>cd nco;git pull</tt></p>
One difference between running a "tagged" release
-(e.g., <tt>4.6.1</tt>) and the development version is that the
+(e.g., <tt>4.6.3</tt>) and the development version is that the
tagged release operators will print a valid version number (e.g.,
-<tt>4.6.1</tt>) when asked to do so with the <tt>-r</tt> flag
+<tt>4.6.3</tt>) when asked to do so with the <tt>-r</tt> flag
(e.g., <tt>ncks -r</tt>).
The development version simply places today's date in place of the
version.
@@ -1216,9 +1219,9 @@ Users should instead first download and install the Antlr found <a href="http://
then build the latest stable NCO and install it in,
e.g., <tt>/usr/local</tt> with:
<tt>
-<dt>wget https://github.com/nco/nco/archive/4.6.1.tar.gz</dt>
-<dt>tar xvzf 4.6.1.tar.gz</dt>
-<dt>cd nco-4.6.1</dt>
+<dt>wget https://github.com/nco/nco/archive/4.6.3.tar.gz</dt>
+<dt>tar xvzf 4.6.3.tar.gz</dt>
+<dt>cd nco-4.6.3</dt>
<dt>./configure --prefix=/usr/local</dt>
<dt>make</dt>
<dt>sudo make install</dt>
@@ -1235,6 +1238,24 @@ in the first sentence of your post.
Yes, “bonobo”.
Otherwise we will likely redirect you here.
For more sophisticated build/install options, see the next section.
+<li>CMake build. To build with CMake, do:
+<tt>
+<dt>mkdir build</dt>
+<dt>cd build</dt>
+<dt>cmake ..</dt>
+<dt>make</dt>
+</tt>
+The CMake script tries to find header and dependency libraries in standard locations.
+They can also be set manually with, e.g.,
+<tt>
+<dt>cmake .. -DNETCDF_INCLUDE:PATH=/my/netcdf/include/path
+-DNETCDF_LIBRARY=/my/netcdf/library/file
+-DHDF5_LIBRARY=/my/hdf5/library/file
+-DHDF5_HL_LIBRARY=/my/hdf5_hl/library/file
+-DSZIP_LIBRARY=/my/szip/library/file
+-DZLIB_LIBRARY=/my/zlib/library/file
+-DCURL_LIBRARY=/my/curl/library/file</dt>
+</tt>
</ul>
<dt><a name="Makefile"></a></dt> <!-- http://nco.sf.net#Makefile -->
@@ -1268,14 +1289,15 @@ is usually untagged.
<ul>
<li>ANL ALCF Cooley <tt>cooley.alcf.anl.gov</tt>: <tt>~zender/bin</tt></li>
<li>ANL ALCF Mira <tt>mira.alcf.anl.gov</tt>: <tt>~zender/bin</tt></li>
+<li>ANL LCRC Blues <tt>blues.lcrc.anl.gov</tt>: <tt>~zender/bin</tt></li>
<li>LLNL <tt>aims4.llnl.gov</tt>: <tt>~zender1/bin</tt></li>
<li>NCAR CISL <tt>yellowstone.ucar.edu</tt>: <tt>~zender/bin</tt></li>
<li>NCAR CISL <tt>mirage0.ucar.edu</tt>: <tt>~zender/bin</tt></li>
-<li>NERSC Cori <tt>cori.nersc.gov</tt>: <tt>~zender/bin</tt></li>
-<li>NERSC Edison <tt>edison.nersc.gov</tt>: <tt>~zender/bin</tt> or <tt>module load nco/4.5.5</tt></li>
+<li>NERSC Cori <tt>cori.nersc.gov</tt>: <tt>~zender/bin_cori</tt></li>
+<li>NERSC Edison <tt>edison.nersc.gov</tt>: <tt>~zender/bin_edison</tt> or <tt>module load nco</tt></li>
<li>ORNL OLCF Pileus <tt>pileus-login01.ornl.gov</tt>: <tt>~zender/bin</tt></li>
-<li>ORNL OLCF Rhea <tt>rhea.ccs.ornl.gov</tt>: <tt>~zender/bin</tt></li>
-<li>ORNL OLCF Titan <tt>titan.ccs.ornl.gov</tt>: <tt>~zender/bin</tt></li>
+<li>ORNL OLCF Rhea <tt>rhea.ccs.ornl.gov</tt>: <tt>~zender/bin_rhea</tt> or <tt>module load nco</tt></li>
+<li>ORNL OLCF Titan <tt>titan.ccs.ornl.gov</tt>: <tt>~zender/bin_titan</tt></li>
<li>UCI ESS <tt>greenplanet.ps.uci.edu</tt>: <tt>~zender/bin</tt></li>
</ul>
<hr></p>
diff --git a/doc/nco.texi b/doc/nco.texi
index 20b5929..f5d9742 100644
--- a/doc/nco.texi
+++ b/doc/nco.texi
@@ -118,12 +118,12 @@ Octave TeXInfo manual shows clean TeXInfo structure
@setfilename nco.info
@c Define edition, date, ...
- at set nco-edition 4.6.2
- at set doc-edition 4.6.2
+ at set nco-edition 4.6.3
+ at set doc-edition 4.6.3
@set copyright-years 1995--2016
@set update-year 2016
- at set update-date 16 November 2016
- at set update-month November 2016
+ at set update-date 23 December 2016
+ at set update-month December 2016
@settitle @acronym{NCO} @value{nco-edition} User Guide
@@ -2366,16 +2366,16 @@ The operators have the following memory requirements:
@command{ncwa} requires @math{MS <= 8VT} (see below).
Note that only variables that are processed, e.g., averaged,
concatenated, or differenced, contribute to @math{MS}.
-Variables which do not appear in the output file
+Variables that do not appear in the output file
(@pxref{Subsetting Files}) are never read and contribute nothing
to the memory requirements.
Further note that some operators perform internal type-promotion on some
variables prior to arithmetic (@pxref{Type Conversion}).
-For example, @command{ncra} and @command{nces} both promote integer
-types to double-precision floating-point prior to arithmetic, then
-perform the arithmetic, then demote back to the original integer type
-after arithmetic.
+For example, @command{ncra}, @command{nces}, and @command{ncwa} all
+promote integer types to double-precision floating-point prior to
+arithmetic, then perform the arithmetic, then demote back to the
+original integer type after arithmetic.
This preserves the on-disk storage type while obtaining the accuracy
advantages of floating-point arithmetic.
Since version 4.3.6 (released in September, 2013), @acronym{NCO} also
@@ -2401,19 +2401,40 @@ See @ref{RAM disks} for further details.
@html
<a name="mmr_ncwa"></a> <!-- http://nco.sf.net/nco.html#mmr_ncwa -->
@end html
- at command{ncwa} consumes between two and seven times the memory of a
-variable in order to process it.
+ at command{ncwa} consumes between two and eight times the memory of an
+ at code{NC_DOUBLE} variable in order to process it.
Peak consumption occurs when storing simultaneously in memory
one input variable, one tally array,
one input weight, one conformed/working weight, one weight tally,
one input mask, one conformed/working mask, and
one output variable.
+ at acronym{NCO}'s tally arrays are of C-type @code{long}, whose size
+is @w{8 bytes} on all modern computers, the same as @code{NC_DOUBLE}.
When invoked, the weighting and masking features contribute up to
-three-sevenths and two-sevenths of these requirements apiece.
+three-eighths and two-eighths of these requirements apiece.
If weights and masks are @emph{not} specified
(i.e., no @samp{-w} or @samp{-a} options)
then @command{ncwa} requirements drop to @math{MS <= 3VT}
(one input variable, one tally array, and the output variable).
+The output variable is the same size as the input variable when
+averaging over a degenerate dimension.
+However, normally the output variable is much smaller than the input,
+and is often a simple scalar, in which case the memory requirements
+drop by @math{1VT} since the output array requires essentially no
+memory.
+
+All of this is subject to the type promotion rules mentioned above.
+For example, @command{ncwa} averaging a variable of type
+ at code{NC_FLOAT} requires @math{MS <= 16VT} (rather than @math{MS <= 8VT})
+since all arrays are (at least temporarily) composed of eight-byte
+elements.
+Without mask or weights, the requirements for @code{NC_FLOAT} are
+ at math{MS <= 6VT} (rather than @math{MS <= 3VT} as for @code{NC_DOUBLE})
+due to temporary internal promotion of both the input variable and the
+output variable to type @code{NC_DOUBLE}.
+The @samp{--flt} option that suppresses promotion reduces this to
+ at math{MS <= 4VT} (the tally elements do not change size), and to
+ at math{MS <= 3VT} when the output array is a scalar.
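+
+For a purely illustrative sense of scale (the size here is
+hypothetical), applying these bounds to a @w{1 GB} @code{NC_FLOAT}
+variable gives:
+ at example
+weights and mask, promotion on      : MS <= 16 x 1 GB = 16 GB
+no weights/mask,  promotion on      : MS <=  6 x 1 GB =  6 GB
+no weights/mask,  --flt             : MS <=  4 x 1 GB =  4 GB
+no weights/mask,  --flt, scalar out : MS <=  3 x 1 GB =  3 GB
+ at end example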
@cindex OpenMP
@cindex threads
@@ -7087,7 +7108,7 @@ requesting units conversion.
@cindex @code{units}
@cindex @code{time}
-The translation and dimensional innterpretation of time coordinates
+The translation and dimensional interpretation of time coordinates
shows a more powerful, and probably more common, UDUnits application.
In this example, the user prints all data between @w{4 PM} and @w{7 PM}
on @w{December 8}, 1999, from a variable whose time dimension is hours
@@ -7196,11 +7217,10 @@ calendar attributes specified by the @acronym{CF} conventions.
@item @strong{Unsupported types:}
"366_day"/"all_leap","proleptic_gregorian","julian","none"
@end table
-Unsupported types default to mixed Gregorian/Julian as defined by
+Unsupported types default to mixed Gregorian/Julian as defined by
UDUnits.
@noindent An Example: Consider the following netCDF variable
-
@example
variables:
double lon_cal(lon_cal) ;
@@ -7239,12 +7259,13 @@ Short options: None@*
@end cartouche
Time rebasing is invoked when numerous files share a common record
-coordinate, and the record coordinate units change among input files.
+coordinate, and the record coordinate basetime (not the time increment,
+e.g., days or hours) changes among input files.
The rebasing is performed automatically if and only if UDUnits is
installed.
-Usually rebasing occurs when the recoordinate is a time-based variable,
-and times are recorded in units of a time-since-basetime, and the
-basetime changes from file to file.
+Rebasing occurs when the record coordinate is a time-based variable, and
+times are recorded in units of a time-since-basetime, and the basetime
+changes from file to file.
Since the output file can have only one unit (i.e., one basetime) for
the record coordinate, @acronym{NCO}, in such cases, chooses the units
of the first input file to be the units of the output file.
@@ -7285,14 +7306,22 @@ ncrcat -v tpt -d time,"1990-1-2 12:00:00","1990-1-3 11:59:59" \
time = 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, \
26, 27, 28, 29, 30, 31, 32, 33, 34, 35 ;
@end example
+
As of @acronym{NCO} version 4.2.1 (August, 2012), @acronym{NCO}
automatically rebases not only the record coordinate (@code{time}, here)
but also any cell boundaries associated with the record coordinate
(e.g., @code{time_bnds}) (@pxref{CF Conventions}).
+
As of @acronym{NCO} version 4.4.9 (May, 2015), @acronym{NCO}
also rebases any climatology boundaries associated with the record
coordinate (e.g., @code{climatology_bounds}) (@pxref{CF Conventions}).
+As of @acronym{NCO} version 4.6.3 (December, 2016), @acronym{NCO}
+also rebases the time coordinate when the units differ between files.
+For example, the first file may have @code{units="days since 2014-03-01"}
+and the second file @code{units="hours since 2014-03-10 00:00"}.
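+For instance, a minimal sketch (with hypothetical filenames) that
+relies on this automatic rebasing is
+ at example
+ncrcat in_days.nc in_hours.nc out.nc # out.nc keeps "days since 2014-03-01"
+ at end example
+since the output record coordinate adopts the units of the first
+input file, as described above.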
+
@html
<a name="mrd"></a> <!-- http://nco.sf.net/nco.html#mrd -->
@end html
@@ -11363,6 +11392,7 @@ already contain the appended dimensions.
* Loops::
* Include files::
* Sort methods::
+* UDUnits script::
* Vpointer::
* Irregular grids::
* Bilinear interpolation::
@@ -13172,11 +13202,13 @@ print("Total of odd elements ");print(sum);print("\n");
The syntax of an @var{include-file} is:
@example
#include "script.nco"
+#include "/opt/SOURCES/nco/data/tst.nco"
@end example
-The script filename is searched relative to the run directory.
+If the include filename is relative (not absolute) then the file is searched for relative to the run-time directory.
It is possible to nest include files to an arbitrary depth.
A handy use of include files is to store often-used constants.
-Use @acronym{RAM} variables if you do not want these constants written to
+Use @acronym{RAM} variables if you do not want these constants written to the netCDF
@var{output-file}.
@example
// script.nco
@@ -13186,12 +13218,20 @@ Use @acronym{RAM} variables if you do not want these constants written to
e=2.71828; // Regular (disk) variable, written to output
@end example
+As of @acronym{NCO} version 4.6.3 (December, 2016), the user can specify the directories to be searched via the UNIX environment variable @code{NCO_PATH}. The format is identical to that of the UNIX @code{PATH}. These directories are searched only when the include filename is relative.
+
+ at example
+export NCO_PATH=":/home/henryb/bin/:/usr/local/scripts:/opt/SOURCES/nco/data:"
+ at end example
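+
+As a hypothetical end-to-end sketch (the script names here are
+illustrative only), with @code{NCO_PATH} set as above a script invoked
+as
+ at example
+ncap2 -O -v -S driver.nco in.nc out.nc
+ at end example
+may use @code{#include "constants.nco"}, and the relative name
+ at code{constants.nco} is then searched for in each directory listed
+in @code{NCO_PATH}.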
+
@html
<a name="srt"></a> <!-- http://nco.sf.net/nco.html#srt -->
<a name="sort"></a> <!-- http://nco.sf.net/nco.html#sort -->
<a name="remap"></a> <!-- http://nco.sf.net/nco.html#remap -->
@end html
- at node Sort methods, Vpointer, Include files, ncap2 netCDF Arithmetic Processor
+ at node Sort methods, UDUnits script, Include files, ncap2 netCDF Arithmetic Processor
@subsection @command{sort} methods
@cindex @command{sort}
@cindex @command{asort}
@@ -13463,12 +13503,72 @@ EOF
ncap2 -O -v -S ~/ncap2_foo.nco ~/nco/data/in.nc ~/foo.nc
@end example
-
@html
-<a name="vpointer"></a> <!-- http://nco.sf.net/nco.html#vpointer -->
+<a name="udunits_fnc"></a> <!-- http://nco.sf.net/nco.html#udunits_fnc -->
+<a name="units_cnv"></a> <!-- http://nco.sf.net/nco.html#units_cnv -->
@end html
+ at node UDUnits script, Vpointer, Sort methods, ncap2 netCDF Arithmetic Processor
+ at subsection UDUnits script
+ at cindex UDUnits
+
+As of @acronym{NCO} version 4.6.3 (December, 2016), @acronym{ncap2}
+includes support for UDUnits conversions.
+The function is called @code{udunits}.
+Its syntax is
+ at example
+varOut=udunits(varIn, "UnitsOutString")
+ at end example
- at node Vpointer, Irregular grids, Sort methods, ncap2 netCDF Arithmetic Processor
+The @code{udunits()} function looks for the attribute
+ at code{varIn@@units} and fails if it is not found.
+A quirk of this function is that, due to attribute propagation,
+ at code{varOut@@units} will be overwritten by @code{varIn@@units}.
+It is best to re-initialize this attribute after the call.
+In addition, if @code{varIn@@units} is of the form
+ at code{"time_interval since basetime"}, then the calendar attribute
+ at code{varIn@@calendar}, if present, is read and honored.
+If it does not exist then the calendar defaults to the mixed
+Gregorian/Julian calendar as defined by UDUnits.
+
+If @code{varIn} is not a floating-point type then it is promoted to
+ at code{NC_DOUBLE} for the call into the UDUnits library,
+and then demoted back to its original type afterwards.
+ at example
+ at verbatim
+T[lon]={0.0,100.0,150.0,200.0};
+T at units="Celsius";
+// Overwrite variable
+T=udunits(T,"kelvin");
+print(T);
+// 273.15, 373.15, 423.15, 473.15 ;
+T at units="kelvin";
+
+// Rebase coordinate days to hours
+timeOld=time;
+print(timeOld);
+// 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 ;
+timeOld at units="days since 2012-01-30";
+
+ at units="hours since 2012-02-01 01:00";
+timeNew=udunits(timeOld, @units);
+timeNew at units=@units;
+print(timeNew);
+// -25, -1, 23, 47, 71, 95, 119, 143, 167, 191 ;
+
+tOld=time;
+// NB: this calendar has no leap year
+tOld at calendar="365_day";
+tOld at units="minutes since 2012-02-28 23:58:00.00";
+
+ at units="seconds since 2012-03-01 00:00";
+tNew=udunits(tOld, @units);
+tNew at units=@units;
+print(tNew);
+// -60, 0, 60, 120, 180, 240, 300, 360, 420, 480
+ at end verbatim
+ at end example
+
+ at node Vpointer, Irregular grids, UDUnits script, ncap2 netCDF Arithmetic Processor
@subsection Vpointer
@cindex vpointer
@@ -13596,8 +13696,6 @@ three_dmn_var_int:units = "watt meter-2" ;
@end verbatim
@end example
-
-
@html
<a name="rrg"></a> <!-- http://nco.sf.net/nco.html#rrg -->
<a name="rct"></a> <!-- http://nco.sf.net/nco.html#rct -->
@@ -16554,8 +16652,8 @@ ncrcat t_anm_8589_??.nc t_anm_8589_0112.nc
@noindent
SYNTAX
@example
-ncclimo [-a @var{clm_md}] [-b @var{bnd_nm}] [-c @var{caseid}] [-d @var{dbg_lvl}]
-[-e @var{yr_end}] [-f @var{fml_nm}] [-h @var{hst_nm}] [-i @var{drc_in}]
+ncclimo [-a @var{dec_md}] [-C @var{clm_md}] [-c @var{caseid}] [-d @var{dbg_lvl}]
+[-E @var{yr_prv}] [-e @var{yr_end}] [-f @var{fml_nm}] [-h @var{hst_nm}] [-i @var{drc_in}]
[-l @var{lnk_flg}] [-m @var{mdl_nm}] [-n @var{nco_opt}] [-O @var{drc_rgr}] [-o @var{drc_out}]
[-p @var{par_typ}] [-R @var{rgr_opt}] [-r @var{rgr_map}]
[-S @var{yr_prv}] [-s @var{yr_srt}] [-t @var{thr_nbr}] [-v @var{var_lst}] [-x @var{cf_flg}]
@@ -16570,23 +16668,31 @@ annual mean, and optionally regrids all these files.
There are five required options (@samp{-c}, @samp{-s}, @samp{-e},
@samp{-i}, and @samp{-o}), and more options are available to customize
the processing.
-Options are similar but not identical (due to shell limitations) to
- at command{ncremap} options.
+Options are similar to @command{ncremap} options.
Standard @command{ncclimo} usage looks like
@example
ncclimo -c caseid -s srt_yr -e end_yr -i drc_in -o drc_out
ncclimo -m mdl_nm -c caseid -s srt_yr -e end_yr -i drc_in -o drc_out
ncclimo -v var_lst -c caseid -s srt_yr -e end_yr -i drc_in -o drc_out
- at end example
-A complete description of all available flags is given in comments
-embedded in @command{ncclimo}, and a more succinct description is given
-below.
-Options in alphabetical order are:
+ncclimo --case=caseid --start=srt_yr --end=end_yr --input=drc_in --output=drc_out
+ at end example
+Options come in both short (single-letter) and long forms.
+The handful of long-option synonyms for each option allows the user
+to imbue the commands with a level of verbosity and precision that suits
+her taste.
+A complete description of all options is given below, in alphabetical
+order of the short option letter.
+Long option synonyms are given just after the letter.
+When invoked without options, @command{ncclimo} prints a succinct table
+of all options and some examples.
@table @option
- at cindex @code{-a @var{clm_md}}
- at cindex @var{clm_md}
- at item -a @var{clm_md}
-Climatology mode that determines type of @acronym{DJF} average.
+ at cindex @code{-a @var{dec_md}}
+ at cindex @code{--dec_md}
+ at cindex @code{--dec_mode}
+ at cindex @code{--december_mode}
+ at cindex @var{dec_md}
+ at item -a @var{dec_md} (@code{--dec_md}, @code{--december_mode}, @code{--dec_mode})
+December mode determines the type of @acronym{DJF} average.
The two valid options are @code{scd} (default) and @code{sdd}.
@acronym{SCD}-mode stands for ``Seasonally Continuous December''.
The first month used is December of the year before the start year
@@ -16595,9 +16701,23 @@ The last month is November of the end year specified with @samp{-e}.
@acronym{SDD}-mode stands for ``Seasonally Discontinuous December''.
The first month used is January of the specified start year.
The last month is December of the end year specified with @samp{-e}.
+ at cindex @code{-C @var{clm_md}}
+ at cindex @var{clm_md}
+ at cindex @code{--clm_md}
+ at cindex @code{--climatology_mode}
+ at cindex @code{--mode}
+ at cindex @code{--climatology}
+ at item -C @var{clm_md} (@code{--clm_md}, @code{--climatology_mode}, @code{--mode}, @code{--climatology})
+Climatology mode.
+Valid values are @samp{ann} and @samp{mth}.
+The value indicates the timespan of the averages in each input file.
+The default mode is @samp{mth}, which means input files are monthly averages.
+Use @samp{ann} if the input files are a series of annual means.
@cindex @code{-c @var{caseid}}
@cindex @var{caseid}
- at item -c @var{caseid}
+ at cindex @code{--caseid}
+ at cindex @code{--case}
+ at item -c @var{caseid} (@code{--case}, @code{--caseid}, @code{--case_id})
Simulation name, or any input filename for non- at acronym{CESM}'ish files.
For @acronym{CESM}'ish input files like
@file{famipc5_ne30_v0.3_00001.cam.h0.1980-01.nc},
@@ -16611,17 +16731,25 @@ specify @samp{-c merra2_198001.nc}.
See comments in @command{ncclimo} for further documentation.
@cindex @code{-D @var{dbg_lvl}}
@cindex @var{dbg_lvl}
- at item -D @var{dbg_lvl}
+ at cindex @code{--dbg_lvl}
+ at cindex @code{--debug_level}
+ at item -D @var{dbg_lvl} (@code{--dbg_lvl}, @code{--dbg}, @code{--debug}, @code{--debug_level})
Specifies a debugging level similar to the rest of @acronym{NCO}.
-If @math{@var{dbg_lvl} = 1}, @command{ncremap} prints more extensive
+If @math{@var{dbg_lvl} = 1}, @command{ncclimo} prints more extensive
diagnostics of its behavior.
-If @math{@var{dbg_lvl} = 2}, @command{ncremap} prints the commands
+If @math{@var{dbg_lvl} = 2}, @command{ncclimo} prints the commands
it would execute at any higher or lower debugging level, but does
not execute these commands.
-If @math{@var{dbg_lvl} > 2}, @command{ncremap} prints the diagnostic
+If @math{@var{dbg_lvl} > 2}, @command{ncclimo} prints the diagnostic
information, executes all commands, and passes-through the debugging
level to the regridder (@command{ncks}) for additional diagnostics.
- at item -e @var{end_yr}
+ at cindex @code{-e @var{end_yr}}
+ at cindex @var{end_yr}
+ at cindex @code{--end_yr}
+ at cindex @code{--end_year}
+ at cindex @code{--year_end}
+ at cindex @code{--end}
+ at item -e @var{end_yr} (@code{--end_yr}, @code{--yr_end}, @code{--end_year}, @code{--year_end}, @code{--end})
End year (example: 2000).
Unless the option @samp{-a sdd} is specified, the last month used
is November of the specified end year.
@@ -16629,7 +16757,10 @@ If @samp{-a sdd} is specified, the last month is December of the
specified end year.
@cindex @code{-f @var{fml_nm}}
@cindex @var{fml_nm}
- at item -f @var{fml_nm}
+ at cindex @code{--fml_nm}
+ at cindex @code{--family}
+ at cindex @code{--family_name}
+ at item -f @var{fml_nm} (@code{--fml_nm}, @code{--family}, @code{--family_name})
Family name (nickname) of output files.
By default output climo file names are constructed from the @var{caseid}
of the input files.
@@ -16642,7 +16773,10 @@ Example values of @var{fml_nm} are @samp{control}, @samp{experiment},
and (for a single-variable climo) @samp{FSNT}.
@cindex @code{-h @var{hst_nm}}
@cindex @var{hst_nm}
- at item -h @var{hst_nm}
+ at cindex @code{--history_name}
+ at cindex @code{--hst_nm}
+ at cindex @code{--history}
+ at item -h @var{hst_nm} (@code{--hst_nm}, @code{--history_name}, @code{--history})
History volume name of file used to generate climatologies.
This refers to the @var{hst_nm} character sequence used to construct
input file names: @code{caseid.mdl_nm.}@var{hst_nm}@code{.YYYY-MM.nc}.
@@ -16654,13 +16788,20 @@ Examples include @samp{h0} (default, works for @acronym{CAM},
@acronym{ALM/CLM}), @samp{h1}, and @samp{h} (for @acronym{CISM}).
@cindex @code{-i @var{drc_in}}
@cindex @var{drc_in}
- at item -i @var{drc_in}
+ at cindex @code{--drc_in}
+ at cindex @code{--in_drc}
+ at cindex @code{--dir_in}
+ at cindex @code{--input}
+ at item -i @var{drc_in} (@code{--drc_in}, @code{--in_drc}, @code{--dir_in}, @code{--input})
Directory containing all monthly mean files to read as input to the
climatology.
@cindex @code{-l}
- at item -l
+ at cindex @code{--lnk_flg}
+ at cindex @code{--link_flag}
+ at cindex @code{--no_amwg_links}
+ at item -l (@code{--lnk_flg}, @code{--link_flag}, @code{--no_amwg_links})
This switch (which takes no option) turns-off the default linking of
- at acronym{ACME}-climo to @acronym{AMWG}-climo filenames.
+ at acronym{ACME}-climo to @acronym{AMWG}-climo filenames.
@acronym{AMWG} omits the @acronym{YYYYMM} components of climo filenames,
resulting in shorter names.
By default @command{ncclimo} symbolically links the full
@@ -16671,25 +16812,50 @@ Use this switch to turn-off that linking and reduce filename
proliferation if you do not need @acronym{AMWG} filenames.
@cindex @code{-m @var{mdl_nm}}
@cindex @var{mdl_nm}
- at item -m @var{mdl_nm}
+ at cindex @code{--model_name}
+ at cindex @code{--model}
+ at cindex @code{--mdl_nm}
+ at item -m @var{mdl_nm} (@code{--mdl_nm}, @code{--model_name}, @code{--model})
Model name (as embedded in monthly input filenames).
Default is @samp{cam}. Other options are @samp{clm2}, @samp{ocn},
@samp{ice}, @samp{cism}, @samp{cice}, @samp{pop}.
+ at cindex @code{-n @var{nco_opt}}
+ at cindex @var{nco_opt}
+ at cindex @code{--nco_opt}
+ at cindex @code{--nco_options}
+ at cindex @code{--nco}
+ at item -n @var{nco_opt} (@code{--nco_opt}, @code{--nco}, @code{--nco_options})
+Specifies a string of options to pass-through unaltered to
+ at command{ncks}.
+ at var{nco_opt} defaults to @samp{-O --no_tmp_fl}.
@cindex @code{-O @var{drc_rgr}}
@cindex @var{drc_rgr}
- at item -O @var{drc_rgr}
+ at cindex @code{--drc_rgr}
+ at cindex @code{--rgr_drc}
+ at cindex @code{--dir_rgr}
+ at cindex @code{--regrid}
+ at item -O @var{drc_rgr} (@code{--drc_rgr}, @code{--rgr_drc}, @code{--dir_rgr}, @code{--regrid})
Directory to hold regridded climo files.
Regridded climos are placed in @var{drc_out} unless a separate
directory for them is specified with @samp{-O} (NB: capital ``O'').
@cindex @code{-o @var{drc_out}}
@cindex @var{drc_out}
- at item -o @var{drc_out}
+ at cindex @code{--drc_out}
+ at cindex @code{--out_drc}
+ at cindex @code{--dir_out}
+ at cindex @code{--output}
+ at item -o @var{drc_out} (@code{--drc_out}, @code{--out_drc}, @code{--dir_out}, @code{--output})
Directory to hold computed (output) native grid climo files.
Regridded climos are also placed here unless a separate directory
for them is specified with @samp{-O} (NB: capital ``O'').
@cindex @code{-p @var{par_typ}}
@cindex @var{par_typ}
- at item -p @var{par_typ}
+ at cindex @code{--par_typ}
+ at cindex @code{--par_md}
+ at cindex @code{--parallel_type}
+ at cindex @code{--parallel_mode}
+ at cindex @code{--parallel}
+ at item -p @var{par_typ} (@code{--par_typ}, @code{--par_md}, @code{--parallel_type}, @code{--parallel_mode}, @code{--parallel})
Specifies the parallelism mode desired.
The options are serial mode (@samp{-p nil} or @samp{-p serial}),
background mode parallelism (@samp{-p bck}), and @acronym{MPI}
@@ -16698,9 +16864,20 @@ The default is background-mode parallelism.
The default @var{par_typ} is @samp{bck}, which means @command{ncclimo}
spawns up to twelve (one for each month) parallel processes at a time.
See discussion below under Memory Considerations.
+ at cindex @code{-R @var{rgr_opt}}
+ at cindex @var{rgr_opt}
+ at cindex @code{--rgr_opt}
+ at cindex @code{--regrid_options}
+ at item -R @var{rgr_opt} (@code{--rgr_opt}, @code{--regrid_options})
+Specifies a string of options to pass-through unaltered to
+ at command{ncks}.
+ at var{rgr_opt} defaults to @samp{-O --no_tmp_fl}.
@cindex @code{-r @var{rgr_map}}
@cindex @var{rgr_map}
- at item -r @var{rgr_map}
+ at cindex @code{--rgr_map}
+ at cindex @code{--regrid_map}
+ at cindex @code{--map}
+ at item -r @var{rgr_map} (@code{--rgr_map}, @code{--regrid_map}, @code{--map})
Regridding map.
Unless @samp{-r} is specified @command{ncclimo} produces only a
climatology on the native grid of the input datasets.
@@ -16714,7 +16891,11 @@ arguments to the @samp{-R} switch.
See below the discussion on regridding.
@cindex @code{-s @var{srt_yr}}
@cindex @var{srt_yr}
- at item -s @var{srt_yr}
+ at cindex @code{--srt_yr}
+ at cindex @code{--start_year}
+ at cindex @code{--year_start}
+ at cindex @code{--start}
+ at item -s @var{srt_yr} (@code{--srt_yr}, @code{--yr_srt}, @code{--start_year}, @code{--year_start}, @code{--start})
Start year (example: 1980).
Unless the option @samp{-a sdd} is specified, the first month used will
be December of the year before the start year (to allow for contiguous
@@ -16723,7 +16904,10 @@ If @samp{-a sdd} is specified, the first month used is January of
the specified start year.
@cindex @code{-t @var{thr_nbr}}
@cindex @var{thr_nbr}
- at item -t @var{thr_nbr}
+ at cindex @code{--thr_nbr}
+ at cindex @code{--threads}
+ at cindex @code{--thread_number}
+ at item -t @var{thr_nbr} (@code{--thr_nbr}, @code{--threads}, @code{--thread_number})
Specifies the number of threads used per regridding process
(@pxref{OpenMP Threading}).
The @acronym{NCO} regridder scales well to 8--16 threads.
@@ -16732,8 +16916,11 @@ with climatology generation in parallel climatology mode (i.e., when
@math{@var{par_typ}} = @code{mpi} or @code{bck}).
Hence @command{ncclimo} defaults to @var{thr_nbr}=2.
@cindex @code{-v @var{var_lst}}
- at cindex @var{var_lsty}
- at item -v @var{var_lst}
+ at cindex @var{var_lst}
+ at cindex @code{--var_lst}
+ at cindex @code{--variables}
+ at cindex @code{--variable_list}
+ at item -v @var{var_lst} (@code{--var_lst}, @code{--variables}, @code{--variable_list})
Variables to subset.
Same behavior as @ref{Subsetting Files}.
Regular expressions are allowed so, e.g., @samp{PREC.?} extracts
@@ -16743,16 +16930,21 @@ the variables @samp{PRECC,PRECL,PRECSC,PRECSL} if present.
@unnumberedsubsec @acronym{MPAS-O/I} considerations
@acronym{MPAS} ocean and ice models currently have their own
-(non- at acronym{CESM}'ish) naming convention for monthly output files.
- at command{ncclimo} recognizes input files as being from @acronym{MPAS}
-when invoked with @samp{-c hist} and @samp{-m ocn} or @samp{-m ice}.
-Use the optional @option{-f @var{fml_nm}} switch to replace @code{hist}
-with a more descriptive simulation name for the output.
-Invocation looks like
+(non-CESM'ish) naming convention that guarantees output files have the
+same names for all simulations.
+By default @command{ncclimo} analyzes the ``timeSeriesStatsMonthly''
+analysis member output (tell us if you want options for other analysis
+members).
+ at command{ncclimo} recognizes input files as being
+ at acronym{MPAS}-style when invoked with @samp{-m mpaso} or
+ at samp{-m mpascice} like this:
@example
-ncclimo -c hist -m ocn -s 1980 -e 1983 -i $drc_in -o $drc_out # MPAS-O
-ncclimo -c hist -m ice -s 1980 -e 1983 -i $drc_in -o $drc_out # MPAS-I
+ at verbatim
+ncclimo -m mpaso -s 1980 -e 1983 -i $drc_in -o $drc_out # MPAS-O
+ncclimo -m mpascice -s 1980 -e 1983 -i $drc_in -o $drc_out # MPAS-I
+ at end verbatim
@end example
+
@acronym{MPAS} climos are unaware of missing values until/unless
input files are ``fixed''.
We recommend that simulation producers annotate all floating point
@@ -16774,7 +16966,28 @@ although under-the-hood @command{ncclimo} does do some special
pre-processing (dimension permutation, metadata annotation) for
@acronym{MPAS}.
A five-year oEC60to30 @acronym{MPAS-O} climo with regridding to T62
-takes < 10 minutes on @file{rhea}.
+takes less than @w{10 minutes} on the machine @file{rhea}.
+
+ at unnumberedsubsec Annual climos
+Not all model or observed history files are created as monthly means.
+To create a climatological annual mean from a series of annual mean
+inputs, select @command{ncclimo}'s annual climatology mode with
+the @samp{-C ann} option:
+ at example
+ at verbatim
+ncclimo -C ann -m cism -h h -c caseid -s 1851 -e 1900 -i drc_in -o drc_out
+ at end verbatim
+ at end example
+The options @samp{-m mdl_nm} and @samp{-h hst_nm} (that default to
+ at code{cam} and @code{h0}, respectively) tell @command{ncclimo} how to
+construct the input filenames.
+The above command expects input files named
+ at code{caseid.cism.h.1851-01-01-00000.nc},
+ at code{caseid.cism.h.1852-01-01-00000.nc},
+and so on.
+Annual climatology mode produces a single output file (or two if
+regridding is selected), and in all other respects behaves the same as
+monthly climatology mode.
@unnumberedsubsec Regridding Climos and Other Files
@command{ncclimo} will (optionally) regrid during climatology generation
@@ -16828,77 +17041,123 @@ ls drc_out/*climo* | ncremap -m map.nc -O drc_rgr
See @ref{ncremap netCDF Remapper} for more details (including
@acronym{MPAS}!).
- at cindex incremental climatologies (climos)
+ at cindex incremental climatology (climo)
+ at cindex binary climatology (climo)
@cindex extended climatology (climo)
@cindex previous climatology (climo)
@cindex current climatology (climo)
- at unnumberedsubsec Incremental Climatologies
-As of @acronym{NCO} version 4.6.1 (released August, 2016),
- at command{ncclimo} supports generating climatologies incrementally,
-rather as monolithic products generated from the raw monthly data.
-The so-called ``extended climo'' is computed as a weighted mean of two
-consecutive shorter climatologies, called the ``previous'' and
-``current'' climos.
-The extended climo is computed from the earlier climos, not the original
-monthly input that generated them.
-This permits ongoing simulations (or lengthy observations) to be
-analyzed in shorter segments that are incrementaly combined, instead of
-requiring all raw, native-grid data to be simultaneously accessible.
-
-Without incremental climatology capability, generating a one-hundred
+ at unnumberedsubsec Extended Climatologies
+ at command{ncclimo} supports two methods for generating extended
+climatologies: Binary and Incremental.
+Both methods lengthen a climatology without requiring access to
+all the raw monthly data spanning the time period.
+The binary method combines, with appropriate weighting, two previously
+computed climatologies into a single climatology.
+No raw monthly data are employed.
+The incremental method computes a climatology from raw monthly data
+and (with appropriate weighting) combines that with a previously
+computed climatology that ends the month before the raw data begin.
+The incremental method was introduced in @acronym{NCO} version 4.6.1
+(released August, 2016), and the binary method was introduced in
+ at acronym{NCO} version 4.6.3 (released December, 2016).
+
+ at c fxm edit to distinguish extended from binary
+Both methods, binary and incremental, compute the so-called ``extended
+climo'' as a weighted mean of two shorter climatologies,
+called the ``previous'' and ``current'' climos.
+The incremental method uses the original monthly input to compute the
+current climo, which must immediately follow in time the pre-computed
+previous climo.
+The binary method uses pre-computed climos for both the previous and
+current climos, and these climos need not be sequential or
+chronological.
+For both methods the previous and current climos may be of any length
+(in years); their weights are automatically adjusted when computing
+the extended climo.
+
+The use of pre-computed climos permits ongoing simulations (or lengthy
+observations) to be analyzed in shorter segments combined piecemeal,
+instead of requiring all raw, native-grid data to be simultaneously
+accessible.
+Without extended climatology capability, generating a one-hundred
year climatology requires that one-hundred years of monthly data be
available on disk.
Disk-space requirements for large datasets may make this untenable.
-The incremental climo feature permits this one-hundred year climo to be
+Extended climo methods permit a one-hundred year climo to be
generated as the weighted mean of, say, the current ten year climatology
-(weighted at 10%) combined with the climatology of the previous 90-years
-(weighted at 90%).
-The 90-year climo could itself have been generated incrementally, and so
-on.
+(weighted at 10%) combined with the pre-computed climatology of the
+previous 90 years (weighted at 90%).
+The 90-year climo could itself have been generated incrementally or
+with the binary method, and so on.
Climatologies occupy at most 17/(12 at var{N}) the amount of space
-of @var{N} years of monthly data, so incremental climatologies
-vastly reduce disk-space requirements.
+of @var{N} years of monthly data, so the extended methods vastly
+reduce disk-space requirements.
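+As a purely illustrative example, @math{@var{N} = 100} years of
+monthly data yield an upper bound of @math{17/1200}, roughly 1.4% of
+the raw monthly storage.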
- at command{ncclimo} first generates the current climatology from the
-current monthly input files then, if requested (with @samp{-S}), weights
-that current climo with the previous climo to produce the extended
-climo.
+Incremental mode is selected by specifying @samp{-S}, the start year
+of the pre-computed, previous climo.
The argument to @samp{-S} is the previous climo start year.
That, together with the current climo end year, determines the extended
climo range.
@command{ncclimo} assumes that the previous climo ends the month before
the current climo begins.
-Most users will run incremental clmos the same way they run regular climos
+In incremental mode, @command{ncclimo} first generates the current
+climatology from the current monthly input files, then weights
+that current climo with the previous climo to produce the extended
+climo.
+
+Binary mode is selected by specifying both @samp{-S} and @samp{-E}, the
+end year of the pre-computed, previous climo.
+In binary mode, the previous and current climatologies can be of any
+length, and from any time period, even overlapping periods.
+Most users will run extended climos the same way they run regular climos
in terms of parallelism and regridding, although that is not required.
Both climos must treat Decembers the same way (or else previous climo files
will not be found), and if subsetting (i.e., @samp{-v var_lst}) is
performed, then the subset must remain the same, and if nicknames (i.e.,
@samp{-f fml_nm}) are employed, then the nickname must remain the same.
-As of 20160621, the @code{climatology_bounds} attributes of the extended
+As of 20161129, the @code{climatology_bounds} attributes of an extended
climo are incorrect.
This is a work in progress...
Options:
@table @option
+ at cindex @code{-E @var{yr_end_prv}}
+ at cindex @var{yr_end_prv}
+ at cindex @code{--yr_end_prv}
+ at cindex @code{--prv_yr_end}
+ at cindex @code{--previous_end}
+ at item -E @var{yr_end_prv} (@code{--yr_end_prv}, @code{--prv_yr_end}, @code{--previous_end})
+The ending year of the previous climo.
+This argument is required to trigger binary climatologies,
+and should not be used for incremental climatologies.
@cindex @code{-S @var{yr_srt_prv}}
@cindex @var{yr_srt_prv}
- at item -S @var{yr_srt_prv}
+ at cindex @code{--yr_srt_prv}
+ at cindex @code{--prv_yr_srt}
+ at cindex @code{--previous_start}
+ at item -S @var{yr_srt_prv} (@code{--yr_srt_prv}, @code{--prv_yr_srt}, @code{--previous_start})
The starting year of the previous climo.
-This is the only mandatory argument.
-It is required to trigger incremental climatologies.
+This argument is required to trigger incremental climatologies,
+and is also mandatory for binary climatologies.
@cindex @code{-X @var{drc_xtn}}
@cindex @var{drc_xtn}
- at item -X @var{drc_xtn}
+ at cindex @code{--drc_xtn}
+ at cindex @code{--xtn_drc}
+ at cindex @code{--extended}
+ at item -X @var{drc_xtn} (@code{--drc_xtn}, @code{--xtn_drc}, @code{--extended})
Directory in which the extended native grid climo files will be stored
-for an incremental climatology.
+for an extended climatology.
Default value is @var{drc_prv}.
Unless a separate directory is specified (with @samp{-Y}) for the
extended climo on the analysis grid, it will be stored in @var{drc_xtn},
too.
@cindex @code{-x @var{drc_prv}}
@cindex @var{drc_prv}
- at item -x @var{drc_prv}
+ at cindex @code{--drc_prv}
+ at cindex @code{--prv_drc}
+ at cindex @code{--previous}
+ at item -x @var{drc_prv} (@code{--drc_prv}, @code{--prv_drc}, @code{--previous})
Directory in which the previous native grid climo files reside for an
incremental climatology.
Default value is @var{drc_out}.
@@ -16907,21 +17166,29 @@ previous climo on the analysis grid, it is assumed to reside in
@var{drc_prv}, too.
@cindex @code{-Y @var{drc_rgr_xtn}}
@cindex @var{drc_rgr_xtn}
- at item -Y @var{drc_rgr_xtn}
+ at cindex @code{--drc_rgr_xtn}
+ at cindex @code{--drc_xtn_rgr}
+ at cindex @code{--extended_regridded}
+ at cindex @code{--regridded_extended}
+ at item -Y @var{drc_rgr_xtn} (@code{--drc_rgr_xtn}, @code{--drc_xtn_rgr}, @code{--extended_regridded}, @code{--regridded_extended})
Directory in which the extended analysis grid climo files will be
stored in an incremental climatology.
Default value is @var{drc_xtn}.
@cindex @code{-y @var{drc_rgr_prv}}
@cindex @var{drc_rgr_prv}
- at item -y @var{drc_rgr_prv}
+ at cindex @code{--drc_rgr_prv}
+ at cindex @code{--drc_prv_rgr}
+ at cindex @code{--regridded_previous}
+ at cindex @code{--previous_regridded}
+ at item -y @var{drc_rgr_prv} (@code{--drc_rgr_prv}, @code{--drc_prv_rgr}, @code{--regridded_previous}, @code{--previous_regridded})
Directory in which the previous climo on the analysis grid resides in an
incremental climatology.
Default value is @var{drc_prv}.
@end table
@noindent
-Incremental climatologies can be as simple as providing a start year for
-the previous climo, e.g.,
+Generating an incremental-method climatology can be as simple as
+providing a start year for the previous climo, e.g.,
@example
ncclimo -v FSNT,AODVIS -c caseid -s 1980 -e 1981 -i raw -o clm -r map.nc
ncclimo -v FSNT,AODVIS -c caseid -s 1982 -e 1983 -i raw -o clm -r map.nc -S 1980
@@ -16965,6 +17232,19 @@ ncclimo -v FSNT,AODVIS -c caseid -s 1982 -e 1983 -i raw -o clm -O rgr -r map.nc
-S 1980 -x prv -X xtn -y rgr_prv -Y rgr_xtn
@end example
+ at command{ncclimo} does not know whether a pre-computed climo is on a
+native grid or an analysis grid, i.e., whether it has been regridded.
+In binary mode, @command{ncclimo} may be pointed to two pre-computed
+native grid climatologies, or to two pre-computed analysis grid
+climatologies.
+In other words, it is not necessary to maintain native grid
+climatologies for use in creating extended climatologies.
+It is sufficient to generate climatologies on the analysis grid, and
+feed them to @command{ncclimo} in binary mode, without a mapping file:
+ at example
+ncclimo -c caseid -S 1980 -E 1981 -x prv -s 1980 -e 1981 -i crr -o clm
+ at end example
+
@unnumberedsubsec Coupled Runs
@command{ncclimo} works on all @acronym{ACME} and @acronym{CESM} models.
It can simultaneously generate climatologies for a coupled run, where
@@ -16983,8 +17263,8 @@ map_ocn=${DATA}/maps/map_oEC60to30_to_t62_bilin.20160301.nc
map_ice=$map_ocn
ncclimo -p mpi -c $caseid -m cam -s 2 -e 5 -i $drc_in -r $map_atm -o $drc_out/atm
ncclimo -c $caseid -m clm2 -s 2 -e 5 -i $drc_in -r $map_lnd -o $drc_out/lnd
-ncclimo -p mpi -c hist -m ocn -s 2 -e 5 -i $drc_in -r $map_ocn -o $drc_out/ocn
-ncclimo -c hist -m ice -s 2 -e 5 -i $drc_in -r $map_ice -o $drc_out/ice
+ncclimo -p mpi -m mpaso -s 2 -e 5 -i $drc_in -r $map_ocn -o $drc_out/ocn
+ncclimo -m mpascice -s 2 -e 5 -i $drc_in -r $map_ice -o $drc_out/ice
@end verbatim
@end example
Atmosphere and ocean model output is typically larger than land and ice
@@ -18225,7 +18505,7 @@ ncks [-3] [-4] [-5] [-6] [-7] [-A] [-a] [-b @var{fl_bnr}] [-C] [-c] [--cdl]
[--cnk_min @var{sz_byt}] [--cnk_plc @var{plc}] [--cnk_scl @var{sz_lmn}]
[-D @var{dbg}] [-d @var{dim},[@var{min}][,[@var{max}][,[@var{stride}]]] [-F] [--fix_rec_dmn @var{dim}]
[-G @var{gpe_dsc}] [-g @var{grp}[, at dots{}]] [--glb ...] [--grp_xtr_var_xcl]
-[-H] [-h] [--hdn] [--hdr_pad @var{nbr}] [--jsn] [--jsn_att_fmt @var{lvl}]
+[-H] [-h] [--hdn] [--hdr_pad @var{nbr}] [--jsn] [--jsn_fmt @var{lvl}]
[-L @var{dfl_lvl}] [-l @var{path}] [-M] [-m] [--map @var{map-file}]
[--md5] [--mk_rec_dmn @var{dim}] [--no_blank] [--no_tmp_fl]
[-O] [-o @var{output-file}] [-P] [-p @var{path}] [--ppc ...]
@@ -18639,7 +18919,7 @@ lat[0]=90 lev[0]=1000 lon[3]=270 three_dmn_var[3]=23
<a name="jsn"></a> <!-- http://nco.sf.net/nco.html#jsn -->
<a name="json"></a> <!-- http://nco.sf.net/nco.html#json -->
@end html
- at cindex @code{--jsn_att_fmt}
+ at cindex @code{--jsn_fmt}
@cindex @code{--jsn}
@cindex @code{--json}
@cindex @acronym{JSN}
@@ -18697,17 +18977,40 @@ Each @acronym{JSON} configuration option automatically triggers
a @acronym{JSON} configuration option is redundant and unnecessary.
Request a specific format level with the pedantic level argument to
-the @samp{--jsn_att_fmt @var{lvl}} option.
-This choice has the following effects for the given value of @var{lvl}:
+the @samp{--jsn_fmt @var{lvl}} option.
+As of @acronym{NCO} version 4.6.3 (December, 2016), the option formerly
+known as @samp{--jsn_att_fmt} was renamed simply @samp{--jsn_fmt}.
+The more general name reflects the fact that the option controls
+all @acronym{JSON} formatting, not just attribute formatting.
+As of version 4.6.3, @acronym{NCO} by default demarcates inner
+dimensions of variable data with (nested) square brackets rather than
+printing the data as an unrolled, single-dimensional array.
+An array with C-ordered dimensionality [2,3,4] prints as:
+ at example
+ at verbatim
+% ncks --jsn -v three_dmn_var ~/nco/data/in.nc
+...
+"data": [[[0.0, 1.0, 2.0, 3.0], [4.0, 5.0, 6.0, 7.0], [8.0, 9.0, 10.0,11.0]], [[12.0, 13.0, 14.0, 15.0], [16.0, 17.0, 18.0, 19.0], [20.0,21.0, 22.0, 23.0]]]
+...
+% ncks --jsn_fmt=4 -v three_dmn_var ~/nco/data/in.nc
+...
+"data": [0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0,22.0, 23.0]
+...
+ at end verbatim
+ at end example
+One can recover the former behavior (and omit the brackets) by adding
+four to the base pedantic level @var{lvl} (as shown above).
+Besides the optional offset of four, @var{lvl} may take one of the
+three values 0--2:
@itemize @bullet
@item @math{@var{lvl} = 0} is the default mode, and is also explicitly
-selectable with @samp{--jsn_att_fmt=0}.
+selectable with @samp{--jsn_fmt=0}.
All values are output without the original @acronym{NC_TYPE} token.
This allows attributes to print as @acronym{JSON} name-value pairs,
rather than as more complex objects:
@example
@verbatim
-% ncks --jsn_att_fmt=0 -v att_var ~/nco/data/in_grp.nc
+% ncks --jsn_fmt=0 -v att_var ~/nco/data/in_grp.nc
...
"att_var": {
"dims": ["time"],
@@ -18754,13 +19057,12 @@ variable's @code{NC_TYPE} to choose the most efficient storage type.
@item @math{@var{lvl} = 1} is a medium-pedantic level that prints all
attributes as objects (with explicit types) @emph{except} those
attributes whose types match the simplest default @acronym{JSON} value
-types (@code{NC_FLOAT},
- at code{NC_CHAR}/@code{NC_STRING}, @code{NC_INT}).
+types (@code{NC_FLOAT}, @code{NC_CHAR}/@code{NC_STRING}, @code{NC_INT}).
double, string, and int attributes are printed as @acronym{JSON} arrays,
as in the @math{@var{lvl} = 0} above:
@example
@verbatim
-% ncks --jsn_att_fmt=1 -v att_var ~/nco/data/in.nc
+% ncks --jsn_fmt=1 -v att_var ~/nco/data/in.nc
...
"att_var": {
"dims": ["time"],
@@ -18799,7 +19101,7 @@ value so that any downstream parser can (though it need not) guarantee
exact reproduction of the original dataset:
@example
@verbatim
-% ncks --jsn_att_fmt=2 -v att_var ~/nco/data/in.nc
+% ncks --jsn_fmt=2 -v att_var ~/nco/data/in.nc
...
"att_var": {
"dims": ["time"],
@@ -20708,7 +21010,10 @@ Features common to many operators are described in
@end html
@cindex @code{-a @var{alg_typ}}
@cindex @var{alg_typ}
-@item -a @var{alg_typ}
+@cindex @code{--alg_typ}
+@cindex @code{--algorithm}
+@cindex @code{--regrid_algorithm}
+@item -a @var{alg_typ} (@code{--alg_typ}, @code{--algorithm}, @code{--regrid_algorithm})
Specifies the interpolation algorithm for weight-generation for use by
@command{ESMF_RegridWeightGen} (@acronym{ERWG}).
@command{ncremap} unbundles this algorithm choice from the rest of
@@ -20732,7 +21037,11 @@ This option currently has no effect on TempestRemap weight-generation.
@end html
@cindex @code{-D @var{dbg_lvl}}
@cindex @var{dbg_lvl}
-@item -D @var{dbg_lvl}
+@cindex @code{--dbg_lvl}
+@cindex @code{--dbg}
+@cindex @code{--debug}
+@cindex @code{--debug_level}
+@item -D @var{dbg_lvl} (@code{--dbg_lvl}, @code{--dbg}, @code{--debug}, @code{--debug_level})
Specifies a debugging level similar to the rest of @acronym{NCO}.
If @math{@var{dbg_lvl} = 1}, @command{ncremap} prints more extensive
diagnostics of its behavior.
@@ -20748,7 +21057,11 @@ level to the regridder (@command{ncks}) for additional diagnostics.
@end html
@cindex @code{-d @var{dst_fl}}
@cindex @var{dst_fl}
-@item -d @var{dst_fl}
+@cindex @code{--dst_fl}
+@cindex @code{--destination_file}
+@cindex @code{--template_file}
+@cindex @code{--template}
+@item -d @var{dst_fl} (@code{--dst_fl}, @code{--destination_file}, @code{--template_file}, @code{--template})
Specifies a data file on the destination grid.
Currently @var{dst_fl} must be a data file (not a gridfile,
@acronym{SCRIP} or otherwise) from which @acronym{NCO} can
@@ -20770,7 +21083,10 @@ be interpolated or guessed-at.
@end html
@cindex @code{-E @var{esmf_opt}}
@cindex @var{esmf_opt}
-@item -E @var{esmf_opt}
+@cindex @code{--esmf_opt}
+@cindex @code{--esmf}
+@cindex @code{--esmf_options}
+@item -E @var{esmf_opt} (@code{--esmf_opt}, @code{--esmf}, @code{--esmf_options})
@command{ncremap} passes @var{esmf_opt} directly through to
@acronym{ERWG}.
The user-specified contents of @var{esmf_opt} supersede its default
@@ -20783,7 +21099,11 @@ in which case @var{esmf_opt} could be @samp{-r --ignore_upmapped}.
@end html
@cindex @code{-g @var{grd_dst}}
@cindex @var{grd_dst}
-@item -g @var{grd_dst}
+@cindex @code{--grd_dst}
+@cindex @code{--grid_dest}
+@cindex @code{--dest_grid}
+@cindex @code{--destination_grid}
+@item -g @var{grd_dst} (@code{--grd_dst}, @code{--grid_dest}, @code{--dest_grid}, @code{--destination_grid})
Specifies the destination gridfile in @acronym{SCRIP} format.
@acronym{NCO} will use @acronym{ERWG} to combine @var{grd_dst} with a
source gridfile (either inferred from @var{input-file}, supplied with
@@ -20795,7 +21115,12 @@ produce the remapping weights.
<a name="grd_sng"></a> <!-- http://nco.sf.net/nco.html#grd_sng -->
@end html
@cindex @code{-G @var{grd_sng}}
-@item -G @var{grd_sng}
+@cindex @code{--grd_sng}
+@cindex @code{--grid_generation}
+@cindex @code{--grid_gen}
+@cindex @code{--grid_string}
+@item -G @var{grd_sng} (@code{--grd_sng}, @code{--grid_generation}, @code{--grid_gen}, @code{--grid_string})
Specifies, with @acronym{NCO} options (@pxref{Grid Generation}),
a source gridfile to create.
(Warning: @acronym{NCO}'s syntax for gridfile generations is ugly
@@ -20818,7 +21143,12 @@ and inspecting the results.
@cindex @code{-I @var{in_drc}}
@cindex @var{in_drc}
@cindex @code{stdin}
-@item -I @var{in_drc}
+@cindex @code{--in_drc}
+@cindex @code{--drc_in}
+@cindex @code{--dir_in}
+@cindex @code{--in_dir}
+@cindex @code{--input}
+@item -I @var{in_drc} (@code{--in_drc}, @code{--drc_in}, @code{--dir_in}, @code{--in_dir}, @code{--input})
Specifies the input directory, i.e., the directory which contains
the input file(s).
If @var{in_fl} is also specified, then the input filepath is
@@ -20840,7 +21170,10 @@ through standard input, e.g., @samp{ls *.nc | ncremap ...}.
@end html
@cindex @code{-i @var{in_fl}}
@cindex @var{in_fl}
-@item -i @var{in_fl}
+@cindex @code{--in_fl}
+@cindex @code{--in_file}
+@cindex @code{--input_file}
+@item -i @var{in_fl} (@code{--in_fl}, @code{--in_file}, @code{--input_file})
Specifies the file containing data on the source grid to be remapped
to the destination grid.
When provided with the optional @var{map_fl}, @command{ncremap}
@@ -20865,11 +21198,15 @@ through standard input, e.g., @samp{ls *.nc | ncremap ...}.
@end html
@cindex @code{-j @var{job_nbr}}
@cindex @var{job_nbr}
-@item -j @var{job_nbr}
+@cindex @code{--job_nbr}
+@cindex @code{--job_number}
+@cindex @code{--jobs}
+@item -j @var{job_nbr} (@code{--job_nbr}, @code{--job_number}, @code{--jobs})
Specifies the number of simultaneous regridding processes to spawn
during parallel execution for both Background and @acronym{MPI} modes.
-In both parallel modes @command{ncremap} spawns proceses in batches
-of @var{job_nbr} jobs, thens waits for those processes to complete.
+In both parallel modes @command{ncremap} spawns processes in batches
+of @var{job_nbr} jobs, then waits for those processes to complete.
Once a batch finishes, @command{ncremap} spawns the next batch.
In Background mode, all jobs are spawned to the local node.
In @acronym{MPI} mode, all jobs are spawned in round-robin fashion
@@ -20897,9 +21234,9 @@ Four jobs each with four threads consumes sixteen cores.
As an example, consider regridding 100 files with a single map.
Say you have a five-node cluster, and each node has @w{16 cores}
and can simultaneously regrid two files using eight threads each.
-(One usually needs to test a bit to determine these parameters.)
-Then an optimal (in terms of wallclock time) way to regrid would
-be requesting five nodes with 10 simultaneous jobs of eight threads.
+(One needs to test a bit to determine these parameters.)
+Then an optimal (in terms of wallclock time) invocation would
+request five nodes with @w{10 simultaneous} jobs of eight threads.
On many batch systems this would involve a scheduler command like
this @samp{qsub -l nodes=5 ...} followed by
@samp{ncremap -p mpi -j 10 -t 8 ...}.
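To make the arithmetic above concrete, the following is a minimal sketch of such a submission; the scheduler syntax, map name, and directories are illustrative placeholders rather than part of this change:
@example
# Sketch: five nodes, ten simultaneous regridding jobs, eight threads each
qsub -l nodes=5 <<'EOF'
cd $PBS_O_WORKDIR
ls *.nc | ncremap -p mpi -j 10 -t 8 -m map.nc -O ~/rgr
EOF
@end example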
@@ -20917,7 +21254,12 @@ by utilizing more threads per process.
@end html
@cindex @code{-m @var{map_fl}}
@cindex @var{map_fl}
-@item -m @var{map_fl}
+@cindex @code{--map_fl}
+@cindex @code{--map}
+@cindex @code{--map_file}
+@cindex @code{--rgr_map}
+@cindex @code{--regrid_map}
+@item -m @var{map_fl} (@code{--map_fl}, @code{--map}, @code{--map_file}, @code{--rgr_map}, @code{--regrid_map})
Specifies a mapfile (i.e., weight-file) to remap the source to
destination grid.
If @var{map_fl} is specified in conjunction with any of the @samp{-d},
@@ -20945,7 +21287,11 @@ invoked by @command{ncremap}.
<a name="map_mlt"></a> <!-- http://nco.sf.net/nco.html#map_mlt -->
@end html
@cindex @code{-M}
-@item -M
+@cindex @code{--mlt_map}
+@cindex @code{--multimap}
+@cindex @code{--no_multimap}
+@cindex @code{--nomultimap}
+@item -M (@code{--mlt_map}, @code{--multimap}, @code{--no_multimap}, @code{--nomultimap})
@command{ncremap} assumes that every input file is on a unique grid
unless a source gridfile is specified (with @samp{-s @var{grd_src}})
or multiple-mapfile generation is explicitly turned-off (with
@@ -20966,7 +21312,10 @@ tedious, and unnecessary when batch processing data on the same grids.
@end html
@cindex @code{-n @var{nco_opt}}
@cindex @var{nco_opt}
-@item -n @var{nco_opt}
+@cindex @code{--nco_opt}
+@cindex @code{--nco_options}
+@cindex @code{--nco}
+@item -n @var{nco_opt} (@code{--nco_opt}, @code{--nco_options}, @code{--nco})
Specifies a string of options to pass-through unaltered to
@command{ncks}.
@var{nco_opt} defaults to @samp{-O --no_tmp_fl}.
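As an illustration (not part of the patch itself), one might extend those defaults with ordinary @command{ncks} switches such as netCDF4-classic output and light deflation:
@example
# Sketch: pass extra options straight through to the ncks regridder call
ncremap -n '-O --no_tmp_fl -7 -L 1' -i in.nc -d dst.nc -o out.nc
@end example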
@@ -20976,7 +21325,12 @@ Specifies a string of options to pass-through unaltered to
@end html
@cindex @code{-O @var{out_drc}}
@cindex @var{out_drc}
-@item -O @var{out_drc}
+@cindex @code{--out_drc}
+@cindex @code{--drc_out}
+@cindex @code{--dir_out}
+@cindex @code{--out_dir}
+@cindex @code{--output}
+@item -O @var{out_drc} (@code{--out_drc}, @code{--drc_out}, @code{--dir_out}, @code{--out_dir}, @code{--output})
Specifies the output directory, i.e., the directory name to contain
the output file(s).
If @var{out_fl} is also specified, then the output filepath is
@@ -20991,7 +21345,11 @@ be in separate directories.
<a name="out_fl"></a> <!-- http://nco.sf.net/nco.html#out_fl -->
@end html
@cindex @code{-o @var{out_fl}}
-@item -o @var{out_fl}
+@cindex @code{--out_fl}
+@cindex @code{--output_file}
+@cindex @code{--out_file}
+@item -o @var{out_fl} (@code{--out_fl}, @code{--output_file}, @code{--out_file})
Specifies the output filename, i.e., the name of the file to contain
the data from @var{in_fl} remapped to the destination grid.
If @var{out_fl} already exists it will be overwritten.
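A minimal end-to-end sketch that ties the input, destination, and output options together (all filenames are placeholders):
@example
# Sketch: remap in.nc onto the grid inferred from dst.nc, write out.nc
ncremap -i in.nc -d dst.nc -o out.nc
@end example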
@@ -21004,7 +21362,11 @@ using @samp{-I @var{in_drc}} or standard input) generates an error
@end html
@cindex @code{-P @var{pdq_typ}}
@cindex @var{pdq_typ}
-@item -P @var{pdq_typ}
+@cindex @code{--pdq_typ}
+@cindex @code{--prm_typ}
+@cindex @code{--permutation}
+@cindex @code{--permute}
+@item -P @var{pdq_typ} (@code{--pdq_typ}, @code{--prm_typ}, @code{--permutation}, @code{--permute})
Specifies the permutation mode desired.
As of January~15, 2016, one can tell @command{ncremap} to automatically
permute the dimensions in the data file prior to regridding for a
@@ -21025,7 +21387,12 @@ In @acronym{MPAS}-mode the order is
@end html
@cindex @code{-p @var{par_typ}}
@cindex @var{par_typ}
-@item -p @var{par_typ}
+@cindex @code{--par_typ}
+@cindex @code{--par_md}
+@cindex @code{--parallel_type}
+@cindex @code{--parallel_mode}
+@cindex @code{--parallel}
+@item -p @var{par_typ} (@code{--par_typ}, @code{--par_md}, @code{--parallel_type}, @code{--parallel_mode}, @code{--parallel})
Specifies the parallelism mode desired.
Parallelism accelerates throughput when regridding multiple files in one
@command{ncremap} invocation.
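A sketch of Background-mode parallelism on a single node; the mode keyword @code{bck} and the pre-computed map are assumptions made only for illustration:
@example
# Sketch: four simultaneous background regridding jobs on the local node
ls *.nc | ncremap -p bck -j 4 -m map.nc -O ~/rgr
@end example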
@@ -21052,7 +21419,9 @@ when all source files share the same grid.
@end html
@cindex @code{-R @var{rgr_opt}}
@cindex @var{rgr_opt}
-@item -R @var{rgr_opt}
+@cindex @code{--rgr_opt}
+@cindex @code{--regrid_options}
+@item -R @var{rgr_opt} (@code{--rgr_opt}, @code{--regrid_options})
@command{ncremap} passes @var{rgr_opt} directly through to the
regridder.
This is useful to customize output grids and metadata.
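For instance, reusing the @code{--rgr lat_nm}/@code{--rgr lon_nm} keys shown later in this section (the coordinate names are hypothetical):
@example
# Sketch: tell the regridder the names of the horizontal coordinates
ncremap -R "--rgr lat_nm=xq --rgr lon_nm=zj" -i in.nc -d dst.nc -o out.nc
@end example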
@@ -21065,7 +21434,11 @@ i.e., by default @command{ncremap} always names latitude and longitude
@end html
@cindex @code{-s @var{grd_src}}
@cindex @var{grd_src}
-@item -s @var{grd_src}
+@cindex @code{--grd_src}
+@cindex @code{--grid_source}
+@cindex @code{--source_grid}
+@cindex @code{--src_grd}
+@item -s @var{grd_src} (@code{--grd_src}, @code{--grid_source}, @code{--source_grid}, @code{--src_grd})
Specifies the source gridfile in @acronym{SCRIP} format.
@acronym{NCO} will use @acronym{ERWG} to combine this with a destination
gridfile (either inferred from @var{dst_fl}, or generated by supplying a
@@ -21087,7 +21460,11 @@ Thus @acronym{NCO} will call @acronym{ERWG} only once, and will use that
@end html
@cindex @code{-T @var{tempest_opt}}
@cindex @var{tempest_opt}
-@item -T @var{tempest_opt}
+@cindex @code{--tempest_opt}
+@cindex @code{--tps_opt}
+@cindex @code{--tempest}
+@cindex @code{--tempest_options}
+@item -T @var{tempest_opt} (@code{--tps_opt}, @code{--tempest_opt}, @code{--tempest}, @code{--tempest_options})
@command{ncremap} passes @var{tempest_opt} directly through to
@command{GenerateOfflineMap} (not to @command{GenerateOverlapMesh}).
+The user-specified contents of @var{tempest_opt} supersede its default
@@ -21108,7 +21485,10 @@ to @acronym{FV} maps, pass @samp{-T "--mono"}.
@end html
@cindex @code{-t @var{thr_nbr}}
@cindex @var{thr_nbr}
-@item -t @var{thr_nbr}
+@cindex @code{--thr_nbr}
+@cindex @code{--thread_number}
+@cindex @code{--threads}
+@item -t @var{thr_nbr} (@code{--thr_nbr}, @code{--thread_number}, @code{--threads})
Specifies the number of threads used per regridding process
(@pxref{OpenMP Threading}).
The @acronym{NCO} regridder scales well up to 8--16 threads.
@@ -21118,7 +21498,12 @@ The @acronym{NCO} regridder scales well up to 8--16 threads.
@end html
@cindex @code{-U @var{tmp_drc}}
@cindex @var{tmp_drc}
-@item -U @var{tmp_drc}
+@cindex @code{--tmp_drc}
+@cindex @code{--drc_tmp}
+@cindex @code{--tmp_dir}
+@cindex @code{--dir_tmp}
+@cindex @code{--tmp}
+@item -U @var{tmp_drc} (@code{--tmp_drc}, @code{--drc_tmp}, @code{--tmp_dir}, @code{--dir_tmp}, @code{--tmp})
Specifies the directory in which to place intermediate output files.
Depending on how it is invoked, @command{ncremap} may generate
a few or many intermediate files (grids and maps) that it will, by
@@ -21135,7 +21520,10 @@ or else it uses the current working director (@code{$PWD}).
@cindex @code{-u @var{unq_sfx}}
@cindex @var{unq_sfx}
@cindex noclean
-@item -u @var{unq_sfx}
+@cindex @code{--unq_sfx}
+@cindex @code{--unique_suffix}
+@cindex @code{--suffix}
+@item -u @var{unq_sfx} (@code{--unq_sfx}, @code{--unique_suffix}, @code{--suffix})
Specifies the suffix used to label intermediate (internal) files
generated by the regridding workflow.
Unique names are required to avoid interference among parallel
@@ -21157,7 +21545,10 @@ If @var{unq_sfx} is @samp{noclean} then @command{ncremap} retains
@end html
@cindex @code{-v @var{var_lst}}
@cindex @var{var_lst}
-@item -v @var{var_lst}
+@cindex @code{--var_lst}
+@cindex @code{--variable_list}
+@cindex @code{--variables}
+@item -v @var{var_lst} (@code{--var_lst}, @code{--variable_list}, @code{--variables})
The @samp{-v} option causes @command{ncremap} to regrid only the
variables in @var{var_lst}.
It behaves like subsetting (@pxref{Subsetting Files}) in the rest of
@@ -21168,10 +21559,15 @@ It behaves like subsetting (@pxref{Subsetting Files}) in the rest of
@end html
@cindex @code{-V @var{rgr_var}}
@cindex @var{rgr_var}
-@item -V @var{rgr_var}
+@cindex @code{--rgr_var}
+@cindex @code{--var_rgr}
+@cindex @code{--var_cf}
+@cindex @code{--cf_var}
+@cindex @code{--cf_variable}
+@item -V @var{var_rgr} (@code{--var_rgr}, @code{--rgr_var}, @code{--var_cf}, @code{--cf_var}, @code{--cf_variable})
The @samp{-V} option tells @command{ncremap} to use the same grid as
-@var{rgr_var} in the input file.
-If @var{rgr_var} adheres to the @acronym{CF} @code{coordinates}
+@var{var_rgr} in the input file.
+If @var{var_rgr} adheres to the @acronym{CF} @code{coordinates}
convention described
@uref{http://cfconventions.org/1.6.html#coordinate-system, here},
then @command{ncclimo} will infer the grid as represented by those
@@ -21184,16 +21580,16 @@ Until @acronym{NCO} version 4.6.0 (May, 2016), @command{ncremap} would
not follow @acronym{CF} conventions to identify coordinate variables.
Instead, @command{ncremap} used an internal database of ``usual
suspects'' to identify latitude and longitude coordinate variables.
-Now, if @var{rgr_var} is @acronym{CF}-compliant, then @command{ncremap}
+Now, if @var{var_rgr} is @acronym{CF}-compliant, then @command{ncremap}
will automatically identify the horizontal spatial dimensions.
-If @var{rgr_var} is supplied but is not @acronym{CF}-compliant, then
+If @var{var_rgr} is supplied but is not @acronym{CF}-compliant, then
@command{ncremap} will use its internal database to identify
horizontal spatial dimensions.
If both these automated methods fail, manually supply @command{ncremap}
with the names of the horizontal spatial dimensions
@example
# Method used to obtain horizontal spatial coordinates:
-ncremap -V rgr_var -i in.nc -d dst.nc -O ~/rgr # CF coordinates convention
+ncremap -V var_rgr -i in.nc -d dst.nc -O ~/rgr # CF coordinates convention
ncremap -i in.nc -d dst.nc -O ~/rgr # Internal database
ncremap -R "--rgr lat_nm=xq --rgr lon_nm=zj" -i in.nc -d dst.nc -O ~/rgr # Manual
@end example
@@ -21204,7 +21600,9 @@ ncremap -R "--rgr lat_nm=xq --rgr lon_nm=zj" -i in.nc -d dst.nc -O ~/rgr # Manua
@end html
@cindex @code{-w @var{wgt_gnr}}
@cindex @var{wgt_gnr}
-@item -w @var{wgt_gnr}
+@cindex @code{--wgt_gnr}
+@cindex @code{--weight_generator}
+@item -w @var{wgt_gnr} (@code{--wgt_gnr}, @code{--weight_generator}, @code{--generator})
Specifies the weight-generator to use when a map-file is not provided.
The @var{wgt_gnr} argument must be @code{esmf} for @acronym{ESMF}'s
@command{ESMF_RegridWeightGen} (@acronym{ERWG}), or
@@ -21218,7 +21616,12 @@ is not permitted (since the weight-generator would not be used).
@end html
@cindex @code{-v @var{xtn_lst}}
@cindex @var{xtn_lst}
-@item -x @var{xtn_lst}
+@cindex @code{--xtn_lst}
+@cindex @code{--xtn_var}
+@cindex @code{--var_xtn}
+@cindex @code{--extensive}
+@cindex @code{--extensive_variables}
+@item -x @var{xtn_lst} (@code{--xtn_lst}, @code{--xtn_var}, @code{--var_xtn}, @code{--extensive}, @code{--extensive_variables})
The @samp{-x} option causes @command{ncremap} to treat the variables in
@var{xtn_lst} as @dfn{extensive}, meaning that their value depends on
the gridcell boundaries.
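A sketch, assuming a hypothetical gridcell-area field that should be treated as extensive rather than interpolated like an intensive quantity:
@example
# Sketch: regrid while flagging "area" as an extensive variable
ncremap -x area -i in.nc -d dst.nc -o out.nc
@end example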
@@ -22530,7 +22933,8 @@ Debian packaging
RedHat packaging
@cindex George Shapavalov
@cindex Patrick Kursawe
-@item George Shapavalov, Patrick Kursawe
+@cindex Manfred Schwarb
+@item George Shapavalov, Patrick Kursawe, Manfred Schwarb
Gentoo packaging
@cindex Filipe Fernandes
@item Filipe Fernandes
diff --git a/doc/nco_news.shtml b/doc/nco_news.shtml
index b0a1de2..e458dc9 100644
--- a/doc/nco_news.shtml
+++ b/doc/nco_news.shtml
@@ -145,7 +145,7 @@ Thus DODS-enabled NCO can operate on remote files accessible through
any DODS server without transferring the files.
Only the required data (e.g., the variable or hyperslab specified) are
transferred.
-See the <a href="./nco.html">User's Guide</a> for complete details.</li>
+See the <a href="./nco.html">Users Guide</a> for complete details.</li>
<li>Fortran compatibility has been dropped.
If a volunteer wants to continue to maintain the option to perform
@@ -232,7 +232,7 @@ and
<a href="http://www.cgd.ucar.edu/csm/support/Document/shows/nco/nco_files/v3_document.htm">slide presentation</a>
she developed to introduce and to summarize the NCO operators.
The pamphlet is an excellent primer on NCO's capabilities, though not
-as definitive as the <a href="./nco.html">User's Guide</a>.
+as definitive as the <a href="./nco.html">Users Guide</a>.
<a name="20000305"></a>
<p>2000/03/05:
@@ -255,7 +255,7 @@ License (GPL).
<p>1999/12/26:
Added support for printing character arrays as strings in <tt>ncks</tt>.
Added documentation and examples of stride capability to <tt>ncks</tt>,
-<tt>ncra</tt>, and <tt>ncrcat</tt> portions of User's Guide.
+<tt>ncra</tt>, and <tt>ncrcat</tt> portions of Users Guide.
<a name="19991214"></a>
<p>1999/12/14:
@@ -263,7 +263,7 @@ Added feature to <tt>ncatted</tt> so that C-language escape sequences
may be used in editing character attributes.
Text formatting of attributes now supports <tt>\n</tt>, <tt>\t</tt>,
and most other <tt>printf</tt>-style sequences.
-See the <a href="./nco.html">User's Guide</a> for details.
+See the <a href="./nco.html">Users Guide</a> for details.
<a name="19991206"></a>
<p>1999/12/06:
@@ -303,7 +303,7 @@ Now when <tt>ncwa</tt> is invoked without any user-specified
dimensions, it averages over all dimensions.
Thus the behavior of <tt>-a</tt> is now analogous to that of
<tt>-d</tt> and of <tt>-v</tt>.
-See the <a href="./nco.html">User's Guide</a> for details.
+See the <a href="./nco.html">Users Guide</a> for details.
<a name="19990829"></a>
<P>1999/08/29:
@@ -316,7 +316,7 @@ maintain on the wide variety of platforms supported by NCO.
Now pure C builds are the default and Fortran arithmetic must be
manually enabled by specifying -DUSE_FORTRAN_ARITHMETIC in the build
environment.
-See the <A HREF="http://www.cgd.ucar.edu/cms/nco/nco.html">User's Guide</A> for details.
+See the <A HREF="http://www.cgd.ucar.edu/cms/nco/nco.html">Users Guide</A> for details.
The Fortran code is no longer supported and will probably be
deprecated in future releases.
@@ -330,7 +330,7 @@ unit when <TT>msrcp</TT> is available on the local system.
This change is transparent to the user, so that commands should still
specify the paths to the files on the mass store with, e.g.,
<TT>ncks -O -R -l ./ /ZENDER/nc/in.nc foo.nc</TT>.
-See the <A HREF="http://www.cgd.ucar.edu/cms/nco/nco.html">User's Guide</A> for more details.
+See the <A HREF="http://www.cgd.ucar.edu/cms/nco/nco.html">Users Guide</A> for more details.
<a name="19990703"></a>
<P>1999/07/03:
@@ -366,7 +366,7 @@ course.
<TT>ncrcat</TT> and <TT>ncra</TT> now support index-based
hyperslabbing in the record dimension across files, e.g.,
<TT>ncra -d time,1,100 in1.nc in2.nc ... out.nc</TT>
-The <A HREF="http://www.cgd.ucar.edu/cms/nco/nco.html">User's Guide</A> claimed this feature had already been implemented,
+The <A HREF="http://www.cgd.ucar.edu/cms/nco/nco.html">Users Guide</A> claimed this feature had already been implemented,
but, due to an oversight, that was not true.
Attempting to use this new feature in older versions of NCO resulted
in an "index out of range" error.
@@ -413,7 +413,7 @@ until their implementation is simplified and redesigned. The
<TT>`-N'</TT> option to <TT>ncwa</TT> remains the same. All the new
behavior is fully documented in the rewritten and expanded
<TT>ncwa</TT> section in the
-<A HREF="http://www.cgd.ucar.edu/cms/nco/nco.html">NCO User's
+<A HREF="http://www.cgd.ucar.edu/cms/nco/nco.html">NCO Users
Guide</A>.
<a name="19981104"></a>
diff --git a/doc/nco_src_frg.txt b/doc/nco_src_frg.txt
index 1da4378..6012a76 100644
--- a/doc/nco_src_frg.txt
+++ b/doc/nco_src_frg.txt
@@ -120,7 +120,7 @@ ncks) takes netCDF or HDF input file(s), performs an operation (e.g.,
averaging, hyperslabbing, or renaming), and outputs a processed netCDF
file. Although most users of netCDF and HDF data are involved in
scientific research, these data formats, and thus NCO, are generic and
-are equally useful in fields like finance. The NCO User's Guide
+are equally useful in fields like finance. The NCO Users Guide
illustrates NCO use with examples from the field of climate modeling
and analysis. The NCO homepage is currently
http://nco.sf.net
diff --git a/doc/xmp_cesm.html b/doc/xmp_cesm.html
index a21b247..3614048 100644
--- a/doc/xmp_cesm.html
+++ b/doc/xmp_cesm.html
@@ -42,7 +42,7 @@ To begin, we have documented below a prototypical example of CMIP5
analysis and evaluation using traditional NCO commands on
netCDF3-format model and HDF-EOS format observational (NASA
MODIS satellite instrument) datasets.
-These examples complement the NCO User's Guide by detailing
+These examples complement the NCO Users Guide by detailing
in-depth data analysis in a frequently encountered “real
world” context.
Graphical representations of the results (NCL scripts available upon
diff --git a/man/ncap.1 b/man/ncap.1
index 29fe49a..5cceb51 100644
--- a/man/ncap.1
+++ b/man/ncap.1
@@ -117,13 +117,13 @@ warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
The full documentation for
.B NCO
is maintained as a Texinfo manual called the
-.B NCO User's Guide.
+.B NCO Users Guide.
Because
.B NCO
is mathematical in nature, the documentation includes TeX-intensive
portions not viewable on character-based displays.
Hence the only complete and authoritative versions of the
-.B NCO User's Guide
+.B NCO Users Guide
are the PDF (recommended), DVI, and Postscript versions at
<http://nco.sf.net/nco.pdf>, <http://nco.sf.net/nco.dvi>,
and <http://nco.sf.net/nco.ps>, respectively.
diff --git a/man/ncap2.1 b/man/ncap2.1
index 1d5bcf8..821594c 100644
--- a/man/ncap2.1
+++ b/man/ncap2.1
@@ -35,7 +35,7 @@ ncap2 [\-3] [\-4] [\-6] [\-7] [\-A] [\-\-bfr
.SH DESCRIPTION
.PP
.B ncap2
-supercedes and is backwards-compatible with
+supersedes and is backwards-compatible with
.B ncap
which is now deprecated.
Both operators arithmetically process netCDF files.
@@ -121,13 +121,13 @@ warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
The full documentation for
.B NCO
is maintained as a Texinfo manual called the
-.B NCO User's Guide.
+.B NCO Users Guide.
Because
.B NCO
is mathematical in nature, the documentation includes TeX-intensive
portions not viewable on character-based displays.
Hence the only complete and authoritative versions of the
-.B NCO User's Guide
+.B NCO Users Guide
are the PDF (recommended), DVI, and Postscript versions at
<http://nco.sf.net/nco.pdf>, <http://nco.sf.net/nco.dvi>,
and <http://nco.sf.net/nco.ps>, respectively.
diff --git a/man/ncatted.1 b/man/ncatted.1
index 5353fb7..1380288 100644
--- a/man/ncatted.1
+++ b/man/ncatted.1
@@ -516,13 +516,13 @@ warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
The full documentation for
.B NCO
is maintained as a Texinfo manual called the
-.B NCO User's Guide.
+.B NCO Users Guide.
Because
.B NCO
is mathematical in nature, the documentation includes TeX-intensive
portions not viewable on character-based displays.
Hence the only complete and authoritative versions of the
-.B NCO User's Guide
+.B NCO Users Guide
are the PDF (recommended), DVI, and Postscript versions at
<http://nco.sf.net/nco.pdf>, <http://nco.sf.net/nco.dvi>,
and <http://nco.sf.net/nco.ps>, respectively.
diff --git a/man/ncbo.1 b/man/ncbo.1
index dc32e40..4ec1162 100644
--- a/man/ncbo.1
+++ b/man/ncbo.1
@@ -364,13 +364,13 @@ warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
The full documentation for
.B NCO
is maintained as a Texinfo manual called the
-.B NCO User's Guide.
+.B NCO Users Guide.
Because
.B NCO
is mathematical in nature, the documentation includes TeX-intensive
portions not viewable on character-based displays.
Hence the only complete and authoritative versions of the
-.B NCO User's Guide
+.B NCO Users Guide
are the PDF (recommended), DVI, and Postscript versions at
<http://nco.sf.net/nco.pdf>, <http://nco.sf.net/nco.dvi>,
and <http://nco.sf.net/nco.ps>, respectively.
diff --git a/man/ncclimo.1 b/man/ncclimo.1
index 0cf2bbd..81b1470 100644
--- a/man/ncclimo.1
+++ b/man/ncclimo.1
@@ -8,13 +8,15 @@ ncclimo \- netCDF Climatology Generator
.SH SYNTAX
ncclimo
[\-a
+.IR dec_md ]
+[\-C
.IR clm_md ]
-[\-b
-.IR bnd_nm ]
[\-c
.IR caseid ]
[\-d
.IR dbg_lvl ]
+[\-E
+.IR yr_prv ]
[\-e
.IR yr_end ]
[\-f
@@ -85,13 +87,13 @@ warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
The full documentation for
.B NCO
is maintained as a Texinfo manual called the
-.B NCO User's Guide.
+.B NCO Users Guide.
Because
.B NCO
is mathematical in nature, the documentation includes TeX-intensive
portions not viewable on character-based displays.
Hence the only complete and authoritative versions of the
-.B NCO User's Guide
+.B NCO Users Guide
are the PDF (recommended), DVI, and Postscript versions at
<http://nco.sf.net/nco.pdf>, <http://nco.sf.net/nco.dvi>,
and <http://nco.sf.net/nco.ps>, respectively.
diff --git a/man/ncecat.1 b/man/ncecat.1
index 784a7de..ad91183 100644
--- a/man/ncecat.1
+++ b/man/ncecat.1
@@ -150,13 +150,13 @@ warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
The full documentation for
.B NCO
is maintained as a Texinfo manual called the
-.B NCO User's Guide.
+.B NCO Users Guide.
Because
.B NCO
is mathematical in nature, the documentation includes TeX-intensive
portions not viewable on character-based displays.
Hence the only complete and authoritative versions of the
-.B NCO User's Guide
+.B NCO Users Guide
are the PDF (recommended), DVI, and Postscript versions at
<http://nco.sf.net/nco.pdf>, <http://nco.sf.net/nco.dvi>,
and <http://nco.sf.net/nco.ps>, respectively.
diff --git a/man/nces.1 b/man/nces.1
index 4ac5360..c686f8b 100644
--- a/man/nces.1
+++ b/man/nces.1
@@ -163,13 +163,13 @@ warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
The full documentation for
.B NCO
is maintained as a Texinfo manual called the
-.B NCO User's Guide.
+.B NCO Users Guide.
Because
.B NCO
is mathematical in nature, the documentation includes TeX-intensive
portions not viewable on character-based displays.
Hence the only complete and authoritative versions of the
-.B NCO User's Guide
+.B NCO Users Guide
are the PDF (recommended), DVI, and Postscript versions at
<http://nco.sf.net/nco.pdf>, <http://nco.sf.net/nco.dvi>,
and <http://nco.sf.net/nco.ps>, respectively.
diff --git a/man/ncflint.1 b/man/ncflint.1
index 6495647..830242c 100644
--- a/man/ncflint.1
+++ b/man/ncflint.1
@@ -200,13 +200,13 @@ warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
The full documentation for
.B NCO
is maintained as a Texinfo manual called the
-.B NCO User's Guide.
+.B NCO Users Guide.
Because
.B NCO
is mathematical in nature, the documentation includes TeX-intensive
portions not viewable on character-based displays.
Hence the only complete and authoritative versions of the
-.B NCO User's Guide
+.B NCO Users Guide
are the PDF (recommended), DVI, and Postscript versions at
<http://nco.sf.net/nco.pdf>, <http://nco.sf.net/nco.dvi>,
and <http://nco.sf.net/nco.ps>, respectively.
diff --git a/man/ncks.1 b/man/ncks.1
index 148eb73..c4ded6a 100644
--- a/man/ncks.1
+++ b/man/ncks.1
@@ -41,8 +41,8 @@ dbg_lvl]
[\-H] [\-h] [\-\-hdn]
[\-\-hdr_pad
.IR sz_byt ]
-[\-L
-.IR dfl_lvl ]
+[\-\-json] [\-\-jsn_fmt
+.IR lvl ]
[\-l
.IR path ]
[\-M] [\-m] [\-\-map
@@ -632,13 +632,13 @@ warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
The full documentation for
.B NCO
is maintained as a Texinfo manual called the
-.B NCO User's Guide.
+.B NCO Users Guide.
Because
.B NCO
is mathematical in nature, the documentation includes TeX-intensive
portions not viewable on character-based displays.
Hence the only complete and authoritative versions of the
-.B NCO User's Guide
+.B NCO Users Guide
are the PDF (recommended), DVI, and Postscript versions at
<http://nco.sf.net/nco.pdf>, <http://nco.sf.net/nco.dvi>,
and <http://nco.sf.net/nco.ps>, respectively.
diff --git a/man/nco.1 b/man/nco.1
index 13747a4..1213a22 100644
--- a/man/nco.1
+++ b/man/nco.1
@@ -34,7 +34,7 @@ allows users to manipulate and analyze
files interactively and with simple scripts, avoiding the overhead
(and some of the power) of a higher level programming environment.
The
-.B NCO User's Guide
+.B NCO Users Guide
illustrates their use
with examples from the field of climate modeling and analysis.
.PP
@@ -135,13 +135,13 @@ warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
The full documentation for
.B NCO
is maintained as a Texinfo manual called the
-.B NCO User's Guide.
+.B NCO Users Guide.
Because
.B NCO
is mathematical in nature, the documentation includes TeX-intensive
portions not viewable on character-based displays.
Hence the only complete and authoritative versions of the
-.B NCO User's Guide
+.B NCO Users Guide
are the PDF (recommended), DVI, and Postscript versions at
<http://nco.sf.net/nco.pdf>, <http://nco.sf.net/nco.dvi>,
and <http://nco.sf.net/nco.ps>, respectively.
diff --git a/man/ncpdq.1 b/man/ncpdq.1
index 6e67c0d..2eb5335 100644
--- a/man/ncpdq.1
+++ b/man/ncpdq.1
@@ -148,13 +148,13 @@ warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
The full documentation for
.B NCO
is maintained as a Texinfo manual called the
-.B NCO User's Guide.
+.B NCO Users Guide.
Because
.B NCO
is mathematical in nature, the documentation includes TeX-intensive
portions not viewable on character-based displays.
Hence the only complete and authoritative versions of the
-.B NCO User's Guide
+.B NCO Users Guide
are the PDF (recommended), DVI, and Postscript versions at
<http://nco.sf.net/nco.pdf>, <http://nco.sf.net/nco.dvi>,
and <http://nco.sf.net/nco.ps>, respectively.
diff --git a/man/ncra.1 b/man/ncra.1
index 19c711b..671adb1 100644
--- a/man/ncra.1
+++ b/man/ncra.1
@@ -177,13 +177,13 @@ warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
The full documentation for
.B NCO
is maintained as a Texinfo manual called the
-.B NCO User's Guide.
+.B NCO Users Guide.
Because
.B NCO
is mathematical in nature, the documentation includes TeX-intensive
portions not viewable on character-based displays.
Hence the only complete and authoritative versions of the
-.B NCO User's Guide
+.B NCO Users Guide
are the PDF (recommended), DVI, and Postscript versions at
<http://nco.sf.net/nco.pdf>, <http://nco.sf.net/nco.dvi>,
and <http://nco.sf.net/nco.ps>, respectively.
diff --git a/man/ncrcat.1 b/man/ncrcat.1
index dae00e4..4a5186d 100644
--- a/man/ncrcat.1
+++ b/man/ncrcat.1
@@ -194,13 +194,13 @@ warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
The full documentation for
.B NCO
is maintained as a Texinfo manual called the
-.B NCO User's Guide.
+.B NCO Users Guide.
Because
.B NCO
is mathematical in nature, the documentation includes TeX-intensive
portions not viewable on character-based displays.
Hence the only complete and authoritative versions of the
-.B NCO User's Guide
+.B NCO Users Guide
are the PDF (recommended), DVI, and Postscript versions at
<http://nco.sf.net/nco.pdf>, <http://nco.sf.net/nco.dvi>,
and <http://nco.sf.net/nco.ps>, respectively.
diff --git a/man/ncremap.1 b/man/ncremap.1
index dab36ca..2af97fb 100644
--- a/man/ncremap.1
+++ b/man/ncremap.1
@@ -95,13 +95,13 @@ warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
The full documentation for
.B NCO
is maintained as a Texinfo manual called the
-.B NCO User's Guide.
+.B NCO Users Guide.
Because
.B NCO
is mathematical in nature, the documentation includes TeX-intensive
portions not viewable on character-based displays.
Hence the only complete and authoritative versions of the
-.B NCO User's Guide
+.B NCO Users Guide
are the PDF (recommended), DVI, and Postscript versions at
<http://nco.sf.net/nco.pdf>, <http://nco.sf.net/nco.dvi>,
and <http://nco.sf.net/nco.ps>, respectively.
diff --git a/man/ncrename.1 b/man/ncrename.1
index 9feae29..10dfe79 100644
--- a/man/ncrename.1
+++ b/man/ncrename.1
@@ -79,7 +79,7 @@ In this case, the named dependent variable must be one-dimensional and
should have no missing values.
Such a variable will become a coordinate variable.
.PP
-According to the netCDF User's Guide, renaming properties in
+According to the netCDF Users Guide, renaming properties in
netCDF files does not incur the penalty of recopying the entire file
when the
.I new_name
@@ -217,13 +217,13 @@ warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
The full documentation for
.B NCO
is maintained as a Texinfo manual called the
-.B NCO User's Guide.
+.B NCO Users Guide.
Because
.B NCO
is mathematical in nature, the documentation includes TeX-intensive
portions not viewable on character-based displays.
Hence the only complete and authoritative versions of the
-.B NCO User's Guide
+.B NCO Users Guide
are the PDF (recommended), DVI, and Postscript versions at
<http://nco.sf.net/nco.pdf>, <http://nco.sf.net/nco.dvi>,
and <http://nco.sf.net/nco.ps>, respectively.
diff --git a/man/ncwa.1 b/man/ncwa.1
index 8007bad..263b003 100644
--- a/man/ncwa.1
+++ b/man/ncwa.1
@@ -233,13 +233,13 @@ warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
The full documentation for
.B NCO
is maintained as a Texinfo manual called the
-.B NCO User's Guide.
+.B NCO Users Guide.
Because
.B NCO
is mathematical in nature, the documentation includes TeX-intensive
portions not viewable on character-based displays.
Hence the only complete and authoritative versions of the
-.B NCO User's Guide
+.B NCO Users Guide
are the PDF (recommended), DVI, and Postscript versions at
<http://nco.sf.net/nco.pdf>, <http://nco.sf.net/nco.dvi>,
and <http://nco.sf.net/nco.ps>, respectively.
diff --git a/src/nco++/Makefile.old b/src/nco++/Makefile.old
index 09290e1..f51e11a 100644
--- a/src/nco++/Makefile.old
+++ b/src/nco++/Makefile.old
@@ -30,7 +30,7 @@
# 64-bit ABI netCDF3 on NCAR AIX systems (bluefire):
# cd ${HOME}/nco/src/nco++;ANTLR='/contrib/antlr-2.7.7/bin/antlr' ANTLR_ROOT='/contrib/antlr-2.7.7' NETCDF_LIB='/usr/local/lib64/r4i4' UDUNITS_INC='/contrib/udunits-1.12.9/include' UDUNITS_LIB='/contrib/udunits-1.12.9/lib' make -f Makefile.old --jobs=1 ABI=64 OPTS=D NETCDF4=N UDUNITS=N all;cd -
# 64-bit ABI netCDF4 on NCAR AIX systems (bluefire):
-# cd ${HOME}/nco/src/nco++;ANTLR='/contrib/antlr-2.7.7/bin/antlr' ANTLR_ROOT='/contrib/antlr-2.7.7' CURL_LIB='/contrib/curl/7.21.2/lib' GSL_INC='/contrib/gsl-1.12/include' GSL_LIB='/contrib/gsl-1.12/lib' GSL_MINOR_VERSION='12' HDF5_ROOT='/contrib/hdf5-1.8.7_seq' LDFLAGS='-lnetcdf -lhdf5_hl -lhdf5 -lz' NETCDF_ROOT='/contrib/netcdf/4.1.3_seq' SZ_LIB='/contrib/szip/lib' UDUNITS_LIB='/contrib/zlib/lib' make -f Makefile.old --jobs=1 ABI=64 OPTS=D SZ=Y all;cd -
+# cd ${HOME}/nco/src/nco++;ANTLR='/contrib/antlr-2.7.7/bin/antlr' ANTLR_ROOT='/contrib/antlr-2.7.7' CURL_LIB='/contrib/curl/7.21.2/lib' GSL_INC='/contrib/gsl-1.12/include' GSL_LIB='/contrib/gsl-1.12/lib' GSL_MAJOR_VERSION='1' GSL_MINOR_VERSION='12' HDF5_ROOT='/contrib/hdf5-1.8.7_seq' LDFLAGS='-lnetcdf -lhdf5_hl -lhdf5 -lz' NETCDF_ROOT='/contrib/netcdf/4.1.3_seq' SZ_LIB='/contrib/szip/lib' UDUNITS_LIB='/contrib/zlib/lib' make -f Makefile.old --jobs=1 ABI=64 OPTS=D SZ=Y all;cd -
#
# Cygwin on Windows Vista systems:
# cd ~/nco/src/nco++;ANTLR='antlr' make -f Makefile.old GSL=Y OPTS=D NETCDF4=Y UDUNITS=Y all;cd -
@@ -226,7 +226,7 @@ else
endif # endif PRC
# NB: CPP_TKN is recursively expanded variable, define only when components are ready
-CPP_TKN = ${USR_TKN} -D${PVM_ARCH} -DNO_NETCDF_2 -DVERSION='${VRS_SNG}' -DHOSTNAME='${HOST}' -DUSER='${USER}' -DNCO_ABORT_ON_ERROR
+CPP_TKN = ${USR_TKN} -D${PVM_ARCH} -DNO_NETCDF_2 -DVERSION='${VRS_SNG}' -DHOSTNAME='${HOSTNAME}' -DUSER='${USER}' -DNCO_ABORT_ON_ERROR
ifndef LFLAGS # Flags for Flex (Lex)
LFLAGS := -P${NCO_YY_PFX}
endif
@@ -333,10 +333,24 @@ ifdef GSL_LIB
else
GSL_LIB_FLG := $(shell gsl-config --libs)
endif # endif GSL_LIB
- ifndef GSL_MINOR_VERSION
- GSL_MINOR_VERSION := $(shell gsl-config --version | sed s/^[1-9]\.// )
+ifndef GSL_MAJOR_VERSION
+# 20161118: Following line is always accurate
+ GSL_MAJOR_VERSION := $(shell gsl-config --version | cut -c 1 )
+endif # endif GSL_MAJOR_VERSION
+ifndef GSL_MINOR_VERSION
+# gsl-config --version: 1.16 (old GSL versioning)
+# gsl-config --version: 2.2.1 (new GSL versioning)
+# GSL_MINOR_VERSION := $(shell gsl-config --version )
+ ifeq (${GSL_MAJOR_VERSION},2)
+ GSL_MINOR_VERSION := $(shell gsl-config --version | cut -c 3 )
+ GSL_PATCH_VERSION := $(shell gsl-config --version | cut -c 5 )
+ else # !GSL_MAJOR_VERSION
+ GSL_MINOR_VERSION := $(shell gsl-config --version | sed s/^[1-9]\.// )
+ GSL_PATCH_VERSION := 0
+ endif # !GSL_MAJOR_VERSION
ifeq (${GSL_MINOR_VERSION},${null})
- GSL_MINOR_VERSION := 4
+ GSL_MINOR_VERSION := 4
+ GSL_PATCH_VERSION := 0
endif # end if GSL_MINOR_VERSION
endif # endif GSL_MINOR_VERSION
endif # end if GSL
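The character positions assumed by the cut calls above are easy to verify at the shell; a quick sketch, assuming gsl-config reports the new-style string 2.2.1:

# Sketch: how the version digits are sliced out of gsl-config output
gsl-config --version              # prints "2.2.1" (new scheme) or "1.16" (old scheme)
gsl-config --version | cut -c 1   # 2 -> GSL_MAJOR_VERSION
gsl-config --version | cut -c 3   # 2 -> GSL_MINOR_VERSION
gsl-config --version | cut -c 5   # 1 -> GSL_PATCH_VERSION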
@@ -529,7 +543,7 @@ ifeq (${DAP},Y)
CPP_TKN_OS += -DENABLE_DAP
endif # !DAP
ifeq (${GSL},Y)
- CPP_TKN_OS += -DENABLE_GSL -DHAVE_GSL_H -DNCO_GSL_MINOR_VERSION='${GSL_MINOR_VERSION}'
+ CPP_TKN_OS += -DENABLE_GSL -DHAVE_GSL_H -DNCO_GSL_MAJOR_VERSION='${GSL_MAJOR_VERSION}' -DNCO_GSL_MINOR_VERSION='${GSL_MINOR_VERSION}' -DNCO_GSL_PATCH_VERSION='${GSL_PATCH_VERSION}'
endif # !GSL
ifeq (${MPI},Y)
ifneq (${MPI_FAKE},Y)
@@ -752,6 +766,22 @@ endif
# Works on LINUX, LINUXALPHA, LINUXAMD64, LINUXARM, and FREEBSD and MACOSX
ifneq (${null},$(findstring ${PVM_ARCH},LINUXALPHALINUXAMD64LINUXARMFREEBSDMACOSX))
+# 20161001 GNU ar on Ubuntu Xenial began defaulting to deterministic mode. -U undoes that.
+# 20161001 GNU ar -s is equivalent to ranlib
+# ar -D: Operate in deterministic mode (breaks NCO build on Ubuntu)
+# ar -r: replace existing or insert new file(s) into the archive
+# ar -s: equivalent to ranlib
+# ar -U: Do not operate in deterministic mode. This is the inverse of the D modifier, above: added files and the archive index will get their actual UID, GID, timestamp, and file mode values.
+# ar -U: Unavailable option in RHEL 6 (2010) used on Rhea and Titan
+# ar -v: be verbose
+ARFLAGS := rsUv
+ifneq (${null},$(findstring rhea,${HOSTNAME}))
+ ARFLAGS := rsv
+else ifneq (${null},$(findstring titan,${HOSTNAME}))
+ ARFLAGS := rsv
+else ifneq (${null},$(findstring ys,${HOSTNAME}))
+ ARFLAGS := rsv
+endif # !rhea,titan,yellowstone
CXX := ${LINUX_CXX}
CC := ${LINUX_CC}
CPP := ${CXX}
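For reference, the ARFLAGS logic above amounts to an archiver call of roughly the following shape; the archive and object names are placeholders:

# Sketch: verbose, indexed, non-deterministic (-U) archive update
ar rsUv libnco_c++.a fmc_all_cls.o ncap2_utl.o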
@@ -1398,6 +1428,7 @@ dbg:
@printf "ABI = ${ABI}\n"
@printf "ANTLR = ${ANTLR}\n"
@printf "AR = ${AR}\n"
+ @printf "ARFLAGS = ${ARFLAGS}\n"
@printf "BNR_SFX = ${BNR_SFX}\n"
@printf "CC = ${CC}\n"
@printf "CCACHE = ${CCACHE}\n"
@@ -1418,6 +1449,8 @@ dbg:
@printf "FL_FMT = ${FL_FMT}\n"
@printf "GCC_RCH_ARG = ${GCC_RCH_ARG}\n"
@printf "GSL = $(GSL)\n"
+ @printf "GSL_MINOR_VERSION = $(GSL_MINOR_VERSION)\n"
+ @printf "GSL_MAJOR_VERSION = $(GSL_MAJOR_VERSION)\n"
@printf "HOST = ${HOST}\n"
@printf "HOSTNAME = ${HOSTNAME}\n"
@printf "ICC_RCH_ARG = ${ICC_RCH_ARG}\n"
diff --git a/src/nco++/fmc_all_cls.cc b/src/nco++/fmc_all_cls.cc
index c50a9bb..8ed0a23 100644
--- a/src/nco++/fmc_all_cls.cc
+++ b/src/nco++/fmc_all_cls.cc
@@ -1621,63 +1621,59 @@ var_sct * bsc_cls::getdims_fnd(bool &is_mtd, std::vector<RefAST> &vtr_args, fmc_
}
-
-
//Sort Functions /***********************************/
- srt_cls::srt_cls(bool flg_dbg){
- //Populate only on constructor call
- if(fmc_vtr.empty()){
- fmc_vtr.push_back( fmc_cls("sort" , this,PASORT));
- fmc_vtr.push_back( fmc_cls("asort" , this,PASORT));
- fmc_vtr.push_back( fmc_cls("dsort" , this,PDSORT));
- fmc_vtr.push_back( fmc_cls("remap" , this,PREMAP));
- fmc_vtr.push_back( fmc_cls("unmap" , this,PUNMAP));
- fmc_vtr.push_back( fmc_cls("invert_map" , this,PIMAP));
-
-
- }
+srt_cls::srt_cls(bool flg_dbg){
+ //Populate only on constructor call
+ if(fmc_vtr.empty()){
+ fmc_vtr.push_back( fmc_cls("sort" , this,PASORT));
+ fmc_vtr.push_back( fmc_cls("asort" , this,PASORT));
+ fmc_vtr.push_back( fmc_cls("dsort" , this,PDSORT));
+ fmc_vtr.push_back( fmc_cls("remap" , this,PREMAP));
+ fmc_vtr.push_back( fmc_cls("unmap" , this,PUNMAP));
+ fmc_vtr.push_back( fmc_cls("invert_map" , this,PIMAP));
+
+
}
+}
- var_sct *srt_cls::fnd(RefAST expr, RefAST fargs,fmc_cls &fmc_obj, ncoTree &walker){
+var_sct *srt_cls::fnd(RefAST expr, RefAST fargs,fmc_cls &fmc_obj, ncoTree &walker){
const std::string fnc_nm("gsl_fit_cls::fnd");
- bool is_mtd;
- int fdx=fmc_obj.fdx(); //index
- RefAST tr;
- std::vector<RefAST> vtr_args;
-
-
- if(expr)
- vtr_args.push_back(expr);
-
- if(tr=fargs->getFirstChild()) {
- do
- vtr_args.push_back(tr);
- while(tr=tr->getNextSibling());
- }
-
-
- is_mtd=(expr ? true: false);
-
- switch(fdx){
- case PASORT:
- case PDSORT:
- return srt_fnd(is_mtd,vtr_args,fmc_obj,walker);
- break;
- case PREMAP:
- case PUNMAP:
- return mst_fnd(is_mtd,vtr_args,fmc_obj,walker);
- case PIMAP:
- return imap_fnd(is_mtd,vtr_args,fmc_obj,walker);
- break;
- }
-
-
+ bool is_mtd;
+ int fdx=fmc_obj.fdx(); //index
+ RefAST tr;
+ std::vector<RefAST> vtr_args;
+
+ if(expr)
+ vtr_args.push_back(expr);
+
+ if(tr=fargs->getFirstChild()) {
+ do
+ vtr_args.push_back(tr);
+ while(tr=tr->getNextSibling());
+ }
+
+ is_mtd=(expr ? true: false);
+
+ switch(fdx){
+ case PASORT:
+ case PDSORT:
+ return srt_fnd(is_mtd,vtr_args,fmc_obj,walker);
+ break;
+ case PREMAP:
+ case PUNMAP:
+ return mst_fnd(is_mtd,vtr_args,fmc_obj,walker);
+ case PIMAP:
+ return imap_fnd(is_mtd,vtr_args,fmc_obj,walker);
+ break;
+ // 20161205: Always return value to non-void functions: good practice and required by rpmlint
+ default:
+ return NULL;
+ break;
+ }
} // end gsl_fit_cls::fnd
-
-
var_sct * srt_cls::imap_fnd(bool &is_mtd, std::vector<RefAST> &args_vtr, fmc_cls &fmc_obj, ncoTree &walker){
const std::string fnc_nm("srt_cls::imap_fnd");
@@ -2719,16 +2715,17 @@ var_sct * srt_cls::mst_fnd(bool &is_mtd, std::vector<RefAST> &args_vtr, fmc_cls
//Bilinear Interpolation Functions /****************************************/
- bil_cls::bil_cls(bool flg_dbg){
- //Populate only on constructor call
- if(fmc_vtr.empty()){
- fmc_vtr.push_back( fmc_cls("bilinear_interp",this,PBIL_ALL));
- fmc_vtr.push_back( fmc_cls("bilinear_interp_wrap",this,PBIL_ALL_WRP));
+bil_cls::bil_cls(bool flg_dbg){
+ //Populate only on constructor call
+ if(fmc_vtr.empty()){
+ fmc_vtr.push_back( fmc_cls("bilinear_interp",this,PBIL_ALL));
+ fmc_vtr.push_back( fmc_cls("bilinear_interp_wrap",this,PBIL_ALL_WRP));
+
+ }
+}
- }
- }
- var_sct * bil_cls::fnd(RefAST expr, RefAST fargs,fmc_cls &fmc_obj, ncoTree &walker){
- const std::string fnc_nm("bil_cls::fnd");
+var_sct * bil_cls::fnd(RefAST expr, RefAST fargs,fmc_cls &fmc_obj, ncoTree &walker){
+ const std::string fnc_nm("bil_cls::fnd");
bool bwrp; //if true then wrap X and Y coordinates in grid
bool b_rev_y;
bool b_rev_x;
@@ -2738,94 +2735,82 @@ var_sct * srt_cls::mst_fnd(bool &is_mtd, std::vector<RefAST> &args_vtr, fmc_cls
int idx;
int nbr_dim;
var_sct *var_arr[6];
-
+
nc_type in_typ;
-
+
std::string susg;
std::string sfnm=fmc_obj.fnm();
std::string serr;
-
+
RefAST tr;
std::vector<RefAST> vtr_args;
// de-reference
prs_cls *prs_arg=walker.prs_arg;
vtl_typ lcl_typ;
-
+
fdx=fmc_obj.fdx();
-
-
+
if(expr)
- vtr_args.push_back(expr);
-
- if(tr=fargs->getFirstChild()) {
- do
- vtr_args.push_back(tr);
- while(tr=tr->getNextSibling());
- }
-
+ vtr_args.push_back(expr);
+
+ if(tr=fargs->getFirstChild()) {
+ do
+ vtr_args.push_back(tr);
+ while(tr=tr->getNextSibling());
+ }
+
nbr_args=vtr_args.size();
-
+
switch(fdx){
-
- case PBIL_ALL:
- in_nbr_args=nbr_args;
- susg="usage: var_out="+sfnm+"(Data_in, Data_out, X_out?, Y_out?, X_in?, Y_in?)";
- bwrp=false;
- break;
-
- case PBIL_ALL_WRP:
- in_nbr_args=nbr_args;
- susg="usage: var_out="+sfnm+"(Data_in, Data_out, X_out?, Y_out?, X_in?, Y_in?)";
- bwrp=true;
- break;
-
-
+
+ case PBIL_ALL:
+ in_nbr_args=nbr_args;
+ susg="usage: var_out="+sfnm+"(Data_in, Data_out, X_out?, Y_out?, X_in?, Y_in?)";
+ bwrp=false;
+ break;
+
+ case PBIL_ALL_WRP:
+ in_nbr_args=nbr_args;
+ susg="usage: var_out="+sfnm+"(Data_in, Data_out, X_out?, Y_out?, X_in?, Y_in?)";
+ bwrp=true;
+ break;
} // end switch
-
-
-
- if(in_nbr_args <2 ){
- serr="function requires at least two arguments. You have only supplied "+nbr2sng(in_nbr_args)+ " arguments\n";
- err_prn(sfnm,serr+susg);
- }
-
-
- if(in_nbr_args >6 &&!prs_arg->ntl_scn)
- wrn_prn(sfnm,"Function been called with more than "+ nbr2sng(in_nbr_args)+ "arguments");
-
- // process input args
- for(idx=0 ; idx<in_nbr_args; idx++)
- var_arr[idx]=walker.out(vtr_args[idx]);
-
- in_typ=var_arr[0]->type;
-
-
-
- // initial scan
- if(prs_arg->ntl_scn){
- var_arr[1]=nco_var_cnf_typ(in_typ,var_arr[1]);
- for(idx=0 ; idx<in_nbr_args ; idx++)
- if(idx !=1) nco_var_free(var_arr[idx]);
-
- return var_arr[1];
- }
-
-
-
- if(fdx==PBIL_ALL || fdx==PBIL_ALL_WRP){
- // recall input arguments in order
- // 0 - input data
- // 1 - output data
- // 2 - output X co-ordinate var
- // 3 - output Y co-ordinate var
- // 4 - input X co-ordinate var
- // 5 - input Y co-ordinate var
-
-
-
- if(in_nbr_args<4){
- if(var_arr[1]->nbr_dim <2 )
- err_prn(sfnm,"Output data variable "+std::string(var_arr[1]->nm) + " must have at least two dimensions ");
+
+ if(in_nbr_args <2 ){
+ serr="function requires at least two arguments. You have only supplied "+nbr2sng(in_nbr_args)+ " arguments\n";
+ err_prn(sfnm,serr+susg);
+ }
+
+ if(in_nbr_args >6 &&!prs_arg->ntl_scn)
+ wrn_prn(sfnm,"Function been called with more than "+ nbr2sng(in_nbr_args)+ "arguments");
+
+ // process input args
+ for(idx=0 ; idx<in_nbr_args; idx++)
+ var_arr[idx]=walker.out(vtr_args[idx]);
+
+ in_typ=var_arr[0]->type;
+
+ // initial scan
+ if(prs_arg->ntl_scn){
+ var_arr[1]=nco_var_cnf_typ(in_typ,var_arr[1]);
+ for(idx=0 ; idx<in_nbr_args ; idx++)
+ if(idx !=1) nco_var_free(var_arr[idx]);
+
+ return var_arr[1];
+ }
+
+ if(fdx==PBIL_ALL || fdx==PBIL_ALL_WRP){
+ // recall input arguments in order
+ // 0 - input data
+ // 1 - output data
+ // 2 - output X co-ordinate var
+ // 3 - output Y co-ordinate var
+ // 4 - input X co-ordinate var
+ // 5 - input Y co-ordinate var
+
+ if(in_nbr_args<4){
+ if(var_arr[1]->nbr_dim <2 )
+ err_prn(sfnm,"Output data variable "+std::string(var_arr[1]->nm) + " must have at least two dimensions ");
// get output co-ordinate vars
if(in_nbr_args <3)
@@ -2846,8 +2831,6 @@ var_sct * srt_cls::mst_fnd(bool &is_mtd, std::vector<RefAST> &args_vtr, fmc_cls
var_arr[5]=prs_arg->ncap_var_init(std::string(var_arr[0]->dim[1]->nm),true);
}
-
-
// convert all args to type double and then cast
for(idx=0 ; idx<6; idx++){
var_arr[idx]=nco_var_cnf_typ(NC_DOUBLE,var_arr[idx]);
@@ -2871,6 +2854,8 @@ var_sct * srt_cls::mst_fnd(bool &is_mtd, std::vector<RefAST> &args_vtr, fmc_cls
}
+ // 20161205: Always return value to non-void functions: good practice and required by rpmlint
+ return NULL;
} // end fnc
@@ -2885,7 +2870,6 @@ void bil_cls::clc_bil_fnc(var_sct *v_xin,var_sct *v_yin, var_sct *v_din, var_sct
long jdx;
long kdx;
-
// Sanity check for input/output data
if( v_xin->sz *v_yin->sz != v_din->sz)
err_prn(sfnm,"Dimension size mismatch with input variables\n");
@@ -4675,6 +4659,175 @@ var_sct *vlist_cls::push_fnd(bool &is_mtd, std::vector<RefAST> &vtr_args, fmc_cl
}
+
+
+//udunits Functions /***********************************/
+ udunits_cls::udunits_cls(bool flg_dbg){
+ //Populate only on constructor call
+ if(fmc_vtr.empty()){
+ fmc_vtr.push_back( fmc_cls("udunits",this,PUNITS1));
+
+ }
+ }
+
+ var_sct * udunits_cls::fnd(RefAST expr, RefAST fargs,fmc_cls &fmc_obj, ncoTree &walker){
+ const std::string fnc_nm("udunits_cls::fnd");
+ int fdx;
+ int nbr_args;
+ int nbr_dim;
+ int rcd;
+ long lret;
+ dmn_sct **dim;
+ var_sct *var=NULL_CEWI;
+ var_sct *var_ud_in=NULL_CEWI;
+ var_sct *var_ud_out=NULL_CEWI;
+ var_sct *var_cln=NULL_CEWI;
+ var_sct *var_ret;
+
+ std::string susg;
+ std::string sfnm=fmc_obj.fnm();
+ RefAST tr;
+ std::vector<RefAST> args_vtr;
+ std::vector<std::string> cst_vtr;
+
+ // de-reference
+ prs_cls *prs_arg=walker.prs_arg;
+ nc_type lcl_typ;
+
+ NcapVar *Nvar;
+
+
+ fdx=fmc_obj.fdx();
+
+
+ if(expr)
+ args_vtr.push_back(expr);
+
+ if(tr=fargs->getFirstChild()) {
+ do
+ args_vtr.push_back(tr);
+ while(tr=tr->getNextSibling());
+ }
+
+ nbr_args=args_vtr.size();
+
+ susg="usage: var_out="+sfnm+"(var_in ,unitsOutString)";
+
+
+ if(nbr_args<2)
+ err_prn(sfnm,"Function has been called with less than two arguments\n"+susg);
+
+
+
+ if(nbr_args >2 &&!prs_arg->ntl_scn)
+ wrn_prn(sfnm,"Function been called with more than two arguments");
+
+
+ /* data to convert */
+ var=walker.out(args_vtr[0]);
+
+ /* text string output units */
+ var_ud_out=walker.out(args_vtr[1]);
+
+ lcl_typ=var->type;
+ if( !var->undefined && var->type !=NC_FLOAT && var->type !=NC_DOUBLE )
+ nco_var_cnf_typ(NC_DOUBLE,var);
+
+
+ if(prs_arg->ntl_scn ){
+ nco_var_free(var_ud_out);
+ return var;
+ }
+
+ if(var_ud_out->type !=NC_CHAR && var_ud_out->type !=NC_STRING)
+ err_prn(sfnm,"The second argument must be a netCDF text type\n"+susg);
+
+
+
+ {
+
+ /* hack RefAST to something so that we don't have to call astFactory, which is protected */
+ RefAST atr=walker.nco_dupList(args_vtr[0]);
+ std::string units_att_nm;
+
+ units_att_nm=std::string(var->nm)+"@units";
+
+ atr->setText(units_att_nm);
+ atr->setType(ATT_ID);
+
+ var_ud_in=walker.out(atr);
+
+ if(var_ud_in->type !=NC_CHAR && var_ud_in->type !=NC_STRING)
+ err_prn(sfnm,"The attribute \""+units_att_nm+"\" argument must be a netCDF text type\n"+susg);
+
+
+ /* look for calendar att - may not be present */
+ units_att_nm=std::string(var->nm)+"@calendar";
+
+ Nvar=prs_arg->var_vtr.find(units_att_nm);
+
+ if(Nvar !=NULL)
+ var_cln=nco_var_dpl(Nvar->var);
+ else
+ var_cln=ncap_att_init(units_att_nm,prs_arg);
+
+ if(var_cln && var_cln->type !=NC_CHAR && var_cln->type !=NC_STRING)
+ err_prn(sfnm,"The attribute \""+units_att_nm+"\" argument must be a netCDF text type\n"+susg);
+
+
+ }
+
+ // do heavy lifting
+ {
+
+ char *units_in_sng;
+ char *units_out_sng;
+ char *cln_sng=NULL_CEWI;
+
+ nco_cln_typ cln_typ=cln_nil;
+
+ units_in_sng=ncap_att_char(var_ud_in);
+ units_out_sng=ncap_att_char(var_ud_out);
+
+ if(var_cln)
+ {
+ cln_sng=ncap_att_char(var_cln);
+ cln_typ=nco_cln_get_cln_typ(cln_sng);
+ }
+
+
+
+ #ifdef ENABLE_UDUNITS
+ # ifdef HAVE_UDUNITS2_H
+ rcd=nco_cln_clc_dbl_var_dff(units_in_sng,units_out_sng,cln_typ,(double*)NULL, var);
+ #endif
+ #endif
+
+ if(rcd!=NCO_NOERR)
+ err_prn(sfnm, "Udunits was unable to convert data in the var '"+std::string(var->nm)+"' from '" +std::string(units_in_sng) +"' to '"+std::string(units_out_sng)+"'\n");
+
+ nco_free(units_in_sng);
+ nco_free(units_out_sng);
+ if(cln_sng)
+ nco_free(cln_sng);
+
+ }
+
+
+ /* revert var back to original type */
+ if( var->type != lcl_typ)
+ nco_var_cnf_typ(lcl_typ,var);
+
+
+ nco_var_free(var_ud_in);
+
+ return var;
+
+}
+
+
+
+
/* ncap2 functions and methods */
/* To avoid confusion when I say FUNC (uppercase) I mean a custom ncap2 function.
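As a usage sketch for the udunits() function added above (variable and unit strings are hypothetical; the call shape follows the usage string in udunits_cls::fnd, var_out=udunits(var_in,unitsOutString)):

# Sketch: convert "time" from its units/calendar attributes to days since 2000-01-01
ncap2 -O -s 'time_day=udunits(time,"days since 2000-01-01");' in.nc out.nc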
diff --git a/src/nco++/fmc_all_cls.hh b/src/nco++/fmc_all_cls.hh
index 2fe2064..90510e7 100644
--- a/src/nco++/fmc_all_cls.hh
+++ b/src/nco++/fmc_all_cls.hh
@@ -292,6 +292,17 @@ public:
};
+// udunits Functions /***************************************/
+class udunits_cls: public vtl_cls {
+private:
+ enum{ PUNITS1};
+ bool _flg_dbg;
+public:
+ udunits_cls(bool flg_dbg);
+ var_sct *fnd(RefAST expr, RefAST fargs,fmc_cls &fmc_obj, ncoTree &walker);
+};
+
+
diff --git a/src/nco++/fmc_gsl_cls.cc b/src/nco++/fmc_gsl_cls.cc
index e2f1a3a..f6b86ef 100644
--- a/src/nco++/fmc_gsl_cls.cc
+++ b/src/nco++/fmc_gsl_cls.cc
@@ -120,7 +120,7 @@ void gsl_cls::gsl_ini_sf(void) {
# if NCO_GSL_VERSION >= 108
gpr_vtr.push_back(gpr_cls("gsl_sf_debye_5",f_unn(gsl_sf_debye_5_e),hnd_fnc_x,NC_DOUBLE));
gpr_vtr.push_back(gpr_cls("gsl_sf_debye_6",f_unn(gsl_sf_debye_6_e),hnd_fnc_x,NC_DOUBLE));
-# endif // NCO_GSL_VERSION < 8
+# endif // NCO_GSL_VERSION < 108
// Dilogarithm
// not implemented as all involve complex numbers
@@ -272,14 +272,14 @@ void gsl_cls::gsl_ini_sf(void) {
gpr_vtr.push_back(gpr_cls("gsl_sf_legendre_Ql",f_unn(gsl_sf_legendre_Ql_e),hnd_fnc_xd,NC_INT));
gpr_vtr.push_back(gpr_cls("gsl_sf_legendre_Plm",f_unn(gsl_sf_legendre_Plm_e),hnd_fnc_iid));
- #if NCO_GSL_VERSION < 200
- gpr_vtr.push_back(gpr_cls("gsl_sf_legendre_Plm_array",f_unn(gsl_sf_legendre_Plm_array),hnd_fnc_iidpd,PLEGEND));
- #endif
// gpr_vtr.push_back(gpr_cls("gsl_sf_legendre_Plm_deriv_array",f_unn(gsl_sf_legendre_Plm_deriv_array),));
gpr_vtr.push_back(gpr_cls("gsl_sf_legendre_sphPlm",f_unn(gsl_sf_legendre_sphPlm_e),hnd_fnc_iid));
+ // 20161118: These two functions were deprecated after GSL version 1.x
+ // Unable to get them working on grele with bld/Makefile, so temporarily disabled
#if NCO_GSL_VERSION < 200
- gpr_vtr.push_back(gpr_cls("gsl_sf_legendre_sphPlm_array",f_unn(gsl_sf_legendre_sphPlm_array),hnd_fnc_iidpd,PLEGEND));
+ // gpr_vtr.push_back(gpr_cls("gsl_sf_legendre_Plm_array",f_unn(gsl_sf_legendre_Plm_array),hnd_fnc_iidpd,PLEGEND));
+ // gpr_vtr.push_back(gpr_cls("gsl_sf_legendre_sphPlm_array",f_unn(gsl_sf_legendre_sphPlm_array),hnd_fnc_iidpd,PLEGEND));
#endif
// gpr_vtr.push_back(gpr_cls("gsl_sf_legendre_sphPlm_deriv_array",f_unn(gsl_sf_legendre_sphPlm_deriv_array),));
@@ -407,14 +407,14 @@ void gsl_cls::gsl_ini_cdf(void){
# if NCO_GSL_VERSION >= 108
gpr_vtr.push_back(gpr_cls("gsl_cdf_fdist_Pinv",f_unn(gsl_cdf_fdist_Pinv),hnd_fnc_nd,P3DBLX));
gpr_vtr.push_back(gpr_cls("gsl_cdf_fdist_Qinv",f_unn(gsl_cdf_fdist_Qinv),hnd_fnc_nd,P3DBLX));
-# endif // NCO_GSL_VERSION < 8
+# endif // NCO_GSL_VERSION < 108
gpr_vtr.push_back(gpr_cls("gsl_cdf_beta_P",f_unn(gsl_cdf_beta_P),hnd_fnc_nd,P3DBLX));
gpr_vtr.push_back(gpr_cls("gsl_cdf_beta_Q",f_unn(gsl_cdf_beta_Q),hnd_fnc_nd,P3DBLX));
-# if NCO_GSL_MINOR_VERSION >= 8
+# if NCO_GSL_MAJOR_VERSION >= 2 || ( NCO_GSL_MAJOR_VERSION == 1 && NCO_GSL_MINOR_VERSION >= 8 )
gpr_vtr.push_back(gpr_cls("gsl_cdf_beta_Pinv",f_unn(gsl_cdf_beta_Pinv),hnd_fnc_nd,P3DBLX));
gpr_vtr.push_back(gpr_cls("gsl_cdf_beta_Qinv",f_unn(gsl_cdf_beta_Qinv),hnd_fnc_nd,P3DBLX));
-# endif // NCO_GSL_MINOR_VERSION < 8
+# endif // GSL version >= 1.8
gpr_vtr.push_back(gpr_cls("gsl_cdf_flat_P",f_unn(gsl_cdf_flat_P),hnd_fnc_nd,P3DBLX));
gpr_vtr.push_back(gpr_cls("gsl_cdf_flat_Q",f_unn(gsl_cdf_flat_Q),hnd_fnc_nd,P3DBLX));
@@ -525,7 +525,7 @@ void gsl_cls::gsl_ini_ran(void){
# if NCO_GSL_VERSION >= 108
gpr_vtr.push_back(gpr_cls("gsl_ran_gaussian_ziggurat",f_unn(gsl_ran_gaussian_ziggurat),hnd_fnc_rnd,P1DBLX));
-# endif // NCO_GSL_MINOR_VERSION < 8
+# endif // NCO_GSL_VERSION < 108
gpr_vtr.push_back(gpr_cls("gsl_ran_gaussian_pdf",f_unn(gsl_ran_gaussian_pdf),hnd_fnc_nd,P2DBLX));
@@ -3659,7 +3659,7 @@ var_sct *gsl_cls::hnd_fnc_stat4(bool& is_mtd,std::vector<RefAST>&args_vtr,gpr_cl
#endif /* !ENABLE_NETCDF4 */
default: nco_dfl_case_nc_type_err(); break;
} break;
-# endif // NCO_GSL_MINOR_VERSION < 10
+# endif // NCO_GSL_VERSION < 110
case PS_PVAR:
switch(var_arr[0]->type){
@@ -4232,8 +4232,12 @@ var_sct *gsl_cls::hnd_fnc_stat4(bool& is_mtd,std::vector<RefAST>&args_vtr,gpr_cl
case PEVAL:
return eval_fnd(is_mtd,vtr_args,fmc_obj,walker);
break;
+ // 20161205: Always return a value from non-void functions: good practice and required by rpmlint
+ default:
+ return NULL;
+ break;
}// end switch
-
+
}
// nb this method is only call with fdx==PEVAL
@@ -4499,19 +4503,11 @@ var_sct *gsl_spl_cls::spl_fnd(bool &is_mtd, std::vector<RefAST> &args_vtr, fmc_c
Nvar->var->val.cp=(char*)spline;
(void)cast_nctype_void(NC_CHAR,&Nvar->var->val);
-
-
-
-
// return true
return ncap_sclr_var_mk("~gsl_spl_cls",(nco_int)1);
-
} // end gsl_spl_cls::spl_fnd
-
-
-
//GSL /****************************************/
// gsl Least Squares Fitting
gsl_fit_cls::gsl_fit_cls(bool flg_dbg){
@@ -4558,10 +4554,12 @@ var_sct *gsl_spl_cls::spl_fnd(bool &is_mtd, std::vector<RefAST> &args_vtr, fmc_c
case PMUL_EST:
return fit_est_fnd(is_mtd,vtr_args,fmc_obj,walker);
break;
+ // 20161205: Always return a value from non-void functions: good practice and required by rpmlint
+ default:
+ return NULL;
+ break;
}
-
-
} // end gsl_fit_cls::fnd
diff --git a/src/nco++/fmc_gsl_cls.hh b/src/nco++/fmc_gsl_cls.hh
index b77c9d5..022c65c 100644
--- a/src/nco++/fmc_gsl_cls.hh
+++ b/src/nco++/fmc_gsl_cls.hh
@@ -40,14 +40,18 @@
#include "ncap2_utl.hh"
#include "vtl_cls.hh"
+#ifdef NCO_GSL_MAJOR_VERSION
+# if NCO_GSL_MAJOR_VERSION >= 2
+# define NCO_GSL_VERSION ( NCO_GSL_MAJOR_VERSION * 100 + NCO_GSL_MINOR_VERSION * 10 + NCO_GSL_PATCH_VERSION )
+# endif // NCO_GSL_MAJOR_VERSION
+#endif // NCO_GSL_MAJOR_VERSION
#ifndef NCO_GSL_VERSION
-#ifdef _MSC_VER
-# define NCO_GSL_VERSION 107
-#else
-# define NCO_GSL_VERSION 112
-#endif // _MSC_VER
-#endif // NCO_GSL_MINOR_VERSION
-
+# ifdef _MSC_VER
+# define NCO_GSL_VERSION 107
+# else
+# define NCO_GSL_VERSION 221
+# endif // _MSC_VER
+#endif // NCO_GSL_VERSION
// Some of the gsl_ran_* functions return an unsigned int (NC_UINT)
// netcdf3 has no NC_UINT type, so we convert the returned values to NC_INT
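Reading the new version packing above: NCO_GSL_VERSION = MAJOR*100 + MINOR*10 + PATCH, so GSL 2.2.1 evaluates to 2*100 + 2*10 + 1 = 221, which is the non-MSVC fallback now assumed when the GSL version macros are unavailable. Any GSL 2.x release therefore lands at 200 or above, which is what the "#if NCO_GSL_VERSION < 200" guards in fmc_gsl_cls.cc test for.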
diff --git a/src/nco++/ncap2.cc b/src/nco++/ncap2.cc
index da53663..9ee489c 100644
--- a/src/nco++/ncap2.cc
+++ b/src/nco++/ncap2.cc
@@ -211,6 +211,7 @@ main(int argc,char **argv)
nm_id_sct *xtr_lst_a=NULL_CEWI; /* Initialize to ALL variables in OUTPUT file */
size_t bfr_sz_hnt=NC_SIZEHINT_DEFAULT; /* [B] Buffer size hint */
+ size_t cnk_csh_byt=NCO_CNK_CSH_BYT_DFL; /* [B] Chunk cache size */
size_t cnk_min_byt=NCO_CNK_SZ_MIN_BYT_DFL; /* [B] Minimize size of variable to chunk */
size_t cnk_sz_byt=0UL; /* [B] Chunk size in bytes */
size_t cnk_sz_scl=0UL; /* [nbr] Chunk size scalar */
@@ -575,6 +576,13 @@ main(int argc,char **argv)
(void)pop_fmc_vtr(fmc_vtr,&vlist_obj);
(void)pop_fmc_vtr(fmc_vtr,&print_obj);
(void)pop_fmc_vtr(fmc_vtr,&bnds_obj);
+
+#ifdef ENABLE_UDUNITS
+# ifdef HAVE_UDUNITS2_H
+ udunits_cls udunits_obj(true);
+ (void)pop_fmc_vtr(fmc_vtr,&udunits_obj);
+#endif
+#endif
#ifdef ENABLE_GSL
# ifdef ENABLE_NCO_GSL
@@ -676,7 +684,7 @@ main(int argc,char **argv)
} /* Existing file */
/* Initialize chunking from user-specified inputs */
- if(fl_out_fmt == NC_FORMAT_NETCDF4 || fl_out_fmt == NC_FORMAT_NETCDF4_CLASSIC) rcd+=nco_cnk_ini(in_id,fl_out,cnk_arg,cnk_nbr,cnk_map,cnk_plc,cnk_min_byt,cnk_sz_byt,cnk_sz_scl,&cnk);
+ if(fl_out_fmt == NC_FORMAT_NETCDF4 || fl_out_fmt == NC_FORMAT_NETCDF4_CLASSIC) rcd+=nco_cnk_ini(in_id,fl_out,cnk_arg,cnk_nbr,cnk_map,cnk_plc,cnk_csh_byt,cnk_min_byt,cnk_sz_byt,cnk_sz_scl,&cnk);
/* Copy global attributes */
(void)nco_att_cpy(in_id,out_id,NC_GLOBAL,NC_GLOBAL,(nco_bool)True);
@@ -714,7 +722,7 @@ main(int argc,char **argv)
prs_arg.NCAP_MPI_SORT=(thr_nbr > 1 ? true:false);
prs_arg.dfl_lvl=dfl_lvl; /* [enm] Deflate level */
- prs_arg.cnk_sz=(size_t*)NULL; /* Chunk sizes NULL for now */
+ prs_arg.cnk_sz=(size_t *)NULL; /* Chunk sizes NULL for now */
#ifdef NCO_NETCDF4_AND_FILLVALUE
prs_arg.NCAP4_FILL=(fl_out_fmt == NC_FORMAT_NETCDF4 || fl_out_fmt == NC_FORMAT_NETCDF4_CLASSIC);
diff --git a/src/nco++/ncap2_utl.cc b/src/nco++/ncap2_utl.cc
index 635d23b..ca256f3 100644
--- a/src/nco++/ncap2_utl.cc
+++ b/src/nco++/ncap2_utl.cc
@@ -647,6 +647,43 @@ ncap_att_str /* extract string(s) from a NC_CHAR or NC_STRING type attribute */
}
+
+char * /* new malloc'ed string */
+ncap_att_char /* extract string from a NC_CHAR or first NC_STRING */
+(var_sct *var_att)
+{
+ char *cstr=NULL; /* Remains NULL if attribute is neither NC_CHAR nor NC_STRING */
+
+ (void)cast_void_nctype((nc_type)var_att->type,&var_att->val);
+
+ if(var_att->type==NC_STRING)
+ {
+ /* grab only first string */
+ cstr=strdup(var_att->val.sngp[0]);
+
+ }
+
+ if(var_att->type==NC_CHAR)
+ {
+ cstr=(char*)nco_malloc( (var_att->sz+1) *sizeof(char));
+ strncpy(cstr, var_att->val.cp, var_att->sz);
+ cstr[var_att->sz]='\0';
+ }
+ (void)cast_nctype_void((nc_type)var_att->type,&var_att->val);
+
+ return cstr;
+
+}
+
var_sct * /* O [sct] Remainder of modulo operation of input variables (var1%var2) */
ncap_var_var_mod /* [fnc] Remainder (modulo) operation of two variables */
(var_sct *var1, /* I [sc,t] Variable structure containing field */
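A minimal usage sketch for the new helper (illustrative only: att_var is a hypothetical var_sct* holding an NC_CHAR or NC_STRING attribute, and the caller owns the returned string):

  char *att_txt=ncap_att_char(att_var); /* First NC_STRING element, or NUL-terminated copy of the NC_CHAR data */
  if(att_txt){
    (void)fprintf(stderr,"attribute text = %s\n",att_txt);
    att_txt=(char *)nco_free(att_txt); /* Free the malloc'd copy */
  } /* !att_txt */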
diff --git a/src/nco++/ncap2_utl.hh b/src/nco++/ncap2_utl.hh
index ba0d192..d3f5ffe 100644
--- a/src/nco++/ncap2_utl.hh
+++ b/src/nco++/ncap2_utl.hh
@@ -143,11 +143,18 @@ ncap_att_sprn /* [fnc] Print a single attribute*/
+
int /* number appended */
ncap_att_str /* extract string(s) from a NC_CHAR or NC_STRING type attribute */
(var_sct *var_att, /* I [sct] input attribute */
std::vector<std::string> &str_vtr);
+char * /* new malloc'ed string */
+ncap_att_char /* extract string from a NC_CHAR or first NC_STRING */
+(var_sct *var_att);
+
+
+
var_sct * /* O [sct] Remainder of modulo operation of input variables (var_1%var_2) */
ncap_var_var_mod /* [fnc] Remainder (modulo) operation of two variables */
(var_sct *var_1, /* I [sc,t] Variable structure containing field */
diff --git a/src/nco++/prs_cls.cc b/src/nco++/prs_cls.cc
index 58a193d..ab30cb8 100644
--- a/src/nco++/prs_cls.cc
+++ b/src/nco++/prs_cls.cc
@@ -466,7 +466,20 @@ prs_cls::ncap_var_write_omp(
/* Set chunk sizes, if requested */
// fxm: must first allow cnk_sz specification in ncap2.cc main()
- //if(var->cnk_sz && var->nbr_dim > 0) (void)nco_def_var_chunking(out_id,var_id,(int)NC_CHUNKED,var->cnk_sz);
+
+ /* if var does NOT contain a rec dim then set all chunking to contiguous */
+ {
+ int idx;
+ size_t cnks[NC_MAX_VAR_DIMS]={0};
+ for(idx=0;idx<var->nbr_dim;idx++)
+ if( var->dim[idx]->is_rec_dmn )
+ break;
+
+ if( idx==var->nbr_dim)
+ (void)nco_def_var_chunking(out_id,var_out_id,(int)NC_CONTIGUOUS,cnks);
+ }
+
+
} /* endif netCDF4 */
} /* endif */
} // bdef
@@ -529,8 +542,10 @@ prs_cls::ncap_var_write_omp(
void prs_cls::ncap_def_ntl_scn(void)
{
int idx;
+ int jdx;
int sz;
int var_id;
+ size_t cnks[NC_MAX_VAR_DIMS]={0};
NcapVar *Nvar;
NcapVar *Cvar;
var_sct *var1;
@@ -567,7 +582,13 @@ void prs_cls::ncap_def_ntl_scn(void)
/* Set chunk sizes, if requested */
// fxm: must first allow cnk_sz specification in ncap2.cc main()
- //if(var1->cnk_sz && var1->nbr_dim > 0) (void)nco_def_var_chunking(out_id,var_id,(int)NC_CHUNKED,var1->cnk_sz);
+ for(jdx=0;jdx<var1->nbr_dim;jdx++)
+ if( var1->dim[jdx]->is_rec_dmn )
+ break;
+
+ if( jdx==var1->nbr_dim)
+ (void)nco_def_var_chunking(out_id,var_id,(int)NC_CONTIGUOUS,cnks);
+
} /* endif */
} /* endif netCDF4 */
diff --git a/src/nco/Makefile.am b/src/nco/Makefile.am
index 83a604a..b38a7e9 100644
--- a/src/nco/Makefile.am
+++ b/src/nco/Makefile.am
@@ -26,6 +26,8 @@ else
bin_PROGRAMS = ${NCAP} ncatted ncbo ncecat ncflint ncks ncpdq ncra ncrename ${NCWA}
endif
+BUILT_SOURCES = ncap_yacc.h
+
AM_YFLAGS = -d --name-prefix=nco_yy
ncap_SOURCES = ncap_utl.c ncap.c ncap_yacc.y ncap_lex.l
diff --git a/src/nco/Makefile.in b/src/nco/Makefile.in
index a4ce744..5df0835 100644
--- a/src/nco/Makefile.in
+++ b/src/nco/Makefile.in
@@ -669,6 +669,7 @@ top_srcdir = @top_srcdir@
@NCO_NEED_GETOPT_LONG_TRUE at NCO_GETOPT_C = nco_getopt.c
@NCO_NEED_GETOPT_LONG_FALSE at NCO_GETOPT_H =
@NCO_NEED_GETOPT_LONG_TRUE at NCO_GETOPT_H = nco_getopt.h
+BUILT_SOURCES = ncap_yacc.h
AM_YFLAGS = -d --name-prefix=nco_yy
ncap_SOURCES = ncap_utl.c ncap.c ncap_yacc.y ncap_lex.l
ncap_LDADD = libnco.la
@@ -816,7 +817,8 @@ nco_var_utl.c
EXTRA_DIST = ${NCO_GETOPT_C}
DISTCLEANFILES = ncap_lex.c ncap_yacc.c ncap_yacc.h
CLEANFILES = ncdiff ncea nces ncrcat mpncdiff mpncea mpnces mpncrcat
-all: all-am
+all: $(BUILT_SOURCES)
+ $(MAKE) $(AM_MAKEFLAGS) all-am
.SUFFIXES:
.SUFFIXES: .c .l .lo .log .o .obj .test .test$(EXEEXT) .trs .y
@@ -1345,7 +1347,8 @@ distdir: $(DISTFILES)
done
check-am: all-am
$(MAKE) $(AM_MAKEFLAGS) check-TESTS
-check: check-am
+check: $(BUILT_SOURCES)
+ $(MAKE) $(AM_MAKEFLAGS) check-am
all-am: Makefile $(LTLIBRARIES) $(PROGRAMS)
install-binPROGRAMS: install-libLTLIBRARIES
@@ -1353,7 +1356,8 @@ installdirs:
for dir in "$(DESTDIR)$(libdir)" "$(DESTDIR)$(bindir)"; do \
test -z "$$dir" || $(MKDIR_P) "$$dir"; \
done
-install: install-am
+install: $(BUILT_SOURCES)
+ $(MAKE) $(AM_MAKEFLAGS) install-am
install-exec: install-exec-am
install-data: install-data-am
uninstall: uninstall-am
@@ -1391,6 +1395,7 @@ maintainer-clean-generic:
-rm -f ncap_lex.c
-rm -f ncap_yacc.c
-rm -f ncap_yacc.h
+ -test -z "$(BUILT_SOURCES)" || rm -f $(BUILT_SOURCES)
clean: clean-am
clean-am: clean-binPROGRAMS clean-generic clean-libLTLIBRARIES \
@@ -1464,7 +1469,8 @@ ps-am:
uninstall-am: uninstall-binPROGRAMS uninstall-libLTLIBRARIES
@$(NORMAL_INSTALL)
$(MAKE) $(AM_MAKEFLAGS) uninstall-hook
-.MAKE: check-am install-am install-exec-am install-strip uninstall-am
+.MAKE: all check check-am install install-am install-exec-am \
+ install-strip uninstall-am
.PHONY: CTAGS GTAGS TAGS all all-am check check-TESTS check-am clean \
clean-binPROGRAMS clean-generic clean-libLTLIBRARIES \
diff --git a/src/nco/mpncbo.c b/src/nco/mpncbo.c
index d86eacf..7a21053 100644
--- a/src/nco/mpncbo.c
+++ b/src/nco/mpncbo.c
@@ -205,6 +205,7 @@ main(int argc,char **argv)
nm_id_sct *xtr_lst_2=NULL; /* xtr_lst_2 may be alloc()'d from NULL with -c option */
size_t bfr_sz_hnt=NC_SIZEHINT_DEFAULT; /* [B] Buffer size hint */
+ size_t cnk_csh_byt=NCO_CNK_CSH_BYT_DFL; /* [B] Chunk cache size */
size_t cnk_min_byt=NCO_CNK_SZ_MIN_BYT_DFL; /* [B] Minimize size of variable to chunk */
size_t cnk_sz_byt=0UL; /* [B] Chunk size in bytes */
size_t cnk_sz_scl=0UL; /* [nbr] Chunk size scalar */
diff --git a/src/nco/mpncecat.c b/src/nco/mpncecat.c
index 5000295..ecc0be3 100644
--- a/src/nco/mpncecat.c
+++ b/src/nco/mpncecat.c
@@ -162,6 +162,7 @@ main(int argc,char **argv)
nm_id_sct *xtr_lst=NULL; /* xtr_lst may be alloc()'d from NULL with -c option */
size_t bfr_sz_hnt=NC_SIZEHINT_DEFAULT; /* [B] Buffer size hint */
+ size_t cnk_csh_byt=NCO_CNK_CSH_BYT_DFL; /* [B] Chunk cache size */
size_t cnk_min_byt=NCO_CNK_SZ_MIN_BYT_DFL; /* [B] Minimize size of variable to chunk */
size_t cnk_sz_byt=0UL; /* [B] Chunk size in bytes */
size_t cnk_sz_scl=0UL; /* [nbr] Chunk size scalar */
diff --git a/src/nco/mpncflint.c b/src/nco/mpncflint.c
index aed9f43..40c5f97 100644
--- a/src/nco/mpncflint.c
+++ b/src/nco/mpncflint.c
@@ -178,6 +178,7 @@ main(int argc,char **argv)
nm_id_sct *xtr_lst=NULL; /* xtr_lst may be alloc()'d from NULL with -c option */
size_t bfr_sz_hnt=NC_SIZEHINT_DEFAULT; /* [B] Buffer size hint */
+ size_t cnk_csh_byt=NCO_CNK_CSH_BYT_DFL; /* [B] Chunk cache size */
size_t cnk_min_byt=NCO_CNK_SZ_MIN_BYT_DFL; /* [B] Minimize size of variable to chunk */
size_t cnk_sz_byt=0UL; /* [B] Chunk size in bytes */
size_t cnk_sz_scl=0UL; /* [nbr] Chunk size scalar */
diff --git a/src/nco/mpncpdq.c b/src/nco/mpncpdq.c
index c896ecc..c9a4a46 100644
--- a/src/nco/mpncpdq.c
+++ b/src/nco/mpncpdq.c
@@ -186,6 +186,7 @@ main(int argc,char **argv)
nm_id_sct *xtr_lst=NULL; /* xtr_lst may be alloc()'d from NULL with -c option */
size_t bfr_sz_hnt=NC_SIZEHINT_DEFAULT; /* [B] Buffer size hint */
+ size_t cnk_csh_byt=NCO_CNK_CSH_BYT_DFL; /* [B] Chunk cache size */
size_t cnk_min_byt=NCO_CNK_SZ_MIN_BYT_DFL; /* [B] Minimize size of variable to chunk */
size_t cnk_sz_byt=0UL; /* [B] Chunk size in bytes */
size_t cnk_sz_scl=0UL; /* [nbr] Chunk size scalar */
diff --git a/src/nco/mpncra.c b/src/nco/mpncra.c
index 5b0b876..b5eed96 100644
--- a/src/nco/mpncra.c
+++ b/src/nco/mpncra.c
@@ -213,6 +213,7 @@ main(int argc,char **argv)
nm_id_sct *xtr_lst=NULL; /* xtr_lst may be alloc()'d from NULL with -c option */
size_t bfr_sz_hnt=NC_SIZEHINT_DEFAULT; /* [B] Buffer size hint */
+ size_t cnk_csh_byt=NCO_CNK_CSH_BYT_DFL; /* [B] Chunk cache size */
size_t cnk_min_byt=NCO_CNK_SZ_MIN_BYT_DFL; /* [B] Minimize size of variable to chunk */
size_t cnk_sz_byt=0UL; /* [B] Chunk size in bytes */
size_t cnk_sz_scl=0UL; /* [nbr] Chunk size scalar */
diff --git a/src/nco/mpncwa.c b/src/nco/mpncwa.c
index 19e3ae0..91e0081 100644
--- a/src/nco/mpncwa.c
+++ b/src/nco/mpncwa.c
@@ -198,6 +198,7 @@ main(int argc,char **argv)
prs_sct prs_arg; /* I/O [sct] Global information required in ncwa parser */
size_t bfr_sz_hnt=NC_SIZEHINT_DEFAULT; /* [B] Buffer size hint */
+ size_t cnk_csh_byt=NCO_CNK_CSH_BYT_DFL; /* [B] Chunk cache size */
size_t cnk_min_byt=NCO_CNK_SZ_MIN_BYT_DFL; /* [B] Minimize size of variable to chunk */
size_t cnk_sz_byt=0UL; /* [B] Chunk size in bytes */
size_t cnk_sz_scl=0UL; /* [nbr] Chunk size scalar */
@@ -1064,7 +1065,7 @@ main(int argc,char **argv)
/* Reduce variable over specified dimensions (tally array is set here)
NB: var_prc_out[idx] is new, so corresponding var_out[idx] is dangling */
var_prc_out[idx]=nco_var_avg(var_prc_out[idx],dmn_avg,dmn_avg_nbr,nco_op_typ,flg_rdd,&ddra_info);
- /* var_prc_out[idx]->val now holds numerator of averaging expression documented in NCO User's Guide
+ /* var_prc_out[idx]->val now holds numerator of averaging expression documented in NCO Users Guide
Denominator is also tricky due to sundry normalization options
These logical switches are VERY tricky---be careful modifying them */
if(NRM_BY_DNM && DO_CONFORM_WGT && (!var_prc[idx]->is_crd_var || WGT_MSK_CRD_VAR)){
diff --git a/src/nco/ncap.c b/src/nco/ncap.c
index 3d0c1e8..4b83ab8 100644
--- a/src/nco/ncap.c
+++ b/src/nco/ncap.c
@@ -233,6 +233,7 @@ main(int argc,char **argv)
nm_id_sct *xtr_lst_a=NULL; /* Initialize to ALL variables in OUTPUT file */
size_t bfr_sz_hnt=NC_SIZEHINT_DEFAULT; /* [B] Buffer size hint */
+ size_t cnk_csh_byt=NCO_CNK_CSH_BYT_DFL; /* [B] Chunk cache size */
size_t cnk_min_byt=NCO_CNK_SZ_MIN_BYT_DFL; /* [B] Minimize size of variable to chunk */
size_t cnk_sz_byt=0UL; /* [B] Chunk size in bytes */
size_t cnk_sz_scl=0UL; /* [nbr] Chunk size scalar */
diff --git a/src/nco/ncap_lex.l b/src/nco/ncap_lex.l
index 1af99fc..d0edc45 100644
--- a/src/nco/ncap_lex.l
+++ b/src/nco/ncap_lex.l
@@ -101,6 +101,7 @@
/* 20150115: MacOSX with clang and --enable-debug-custom fails on yyget_leng() prototype
Change return type of yyget_leng() from int to yy_size_t */
/* int yyget_leng(void); *//* fixes: warning: no previous prototype for `yyget_leng' */
+ /* 20161116: Debian (e.g., glace) NCO build fails in ncap_lex.l due to conflicting types for `yyget_leng' */
yy_size_t yyget_leng(void); /* fixes: warning: no previous prototype for `yyget_leng' */
int yyget_lineno(void); /* fixes: warning: no previous prototype for `yyget_lineno' */
int yylex_destroy(void); /* fixes: warning: no previous prototype for `yylex_destroy' */
diff --git a/src/nco/ncap_yacc.y b/src/nco/ncap_yacc.y
index b9edc15..4a16ab9 100644
--- a/src/nco/ncap_yacc.y
+++ b/src/nco/ncap_yacc.y
@@ -42,6 +42,14 @@
#include <stdarg.h> /* va_start, va_arg, va_end */
#include <stdio.h> /* stderr, FILE, NULL, etc. */
#include <stdlib.h> /* atof, atoi, malloc, getopt */
+ /* 20161204: Since gcc 4.7.3, GNU string.h provides strcasestr() as a non-standard extension iff _GNU_SOURCE is defined
+ 20161205: stpcpy() prototype provided since glibc 2.10 with _POSIX_C_SOURCE >= 200809L, and before glibc 2.10 with _GNU_SOURCE
+ Abandoned _GNU_SOURCE because we were unable to get ncap to find the stpcpy() prototype */
+#if 0
+#ifdef __GNUC__
+# define _GNU_SOURCE
+#endif /* __GNUC__ */
+#endif
#include <string.h> /* strcmp() */
#include <assert.h>
/* 3rd party vendors */
diff --git a/src/nco/ncbo.c b/src/nco/ncbo.c
index 9667c54..273da69 100644
--- a/src/nco/ncbo.c
+++ b/src/nco/ncbo.c
@@ -217,6 +217,7 @@ main(int argc,char **argv)
int var_lst_in_nbr=0;
size_t bfr_sz_hnt=NC_SIZEHINT_DEFAULT; /* [B] Buffer size hint */
+ size_t cnk_csh_byt=NCO_CNK_CSH_BYT_DFL; /* [B] Chunk cache size */
size_t cnk_min_byt=NCO_CNK_SZ_MIN_BYT_DFL; /* [B] Minimize size of variable to chunk */
size_t cnk_sz_byt=0UL; /* [B] Chunk size in bytes */
size_t cnk_sz_scl=0UL; /* [nbr] Chunk size scalar */
@@ -622,7 +623,7 @@ main(int argc,char **argv)
fl_out_tmp=nco_fl_out_open(fl_out,&FORCE_APPEND,FORCE_OVERWRITE,fl_out_fmt,&bfr_sz_hnt,RAM_CREATE,RAM_OPEN,WRT_TMP_FL,&out_id);
/* Initialize chunking from user-specified inputs */
- if(fl_out_fmt == NC_FORMAT_NETCDF4 || fl_out_fmt == NC_FORMAT_NETCDF4_CLASSIC) rcd+=nco_cnk_ini(in_id_1,fl_out,cnk_arg,cnk_nbr,cnk_map,cnk_plc,cnk_min_byt,cnk_sz_byt,cnk_sz_scl,&cnk);
+ if(fl_out_fmt == NC_FORMAT_NETCDF4 || fl_out_fmt == NC_FORMAT_NETCDF4_CLASSIC) rcd+=nco_cnk_ini(in_id_1,fl_out,cnk_arg,cnk_nbr,cnk_map,cnk_plc,cnk_csh_byt,cnk_min_byt,cnk_sz_byt,cnk_sz_scl,&cnk);
if(gpe){
if(nco_dbg_lvl >= nco_dbg_fl) (void)fprintf(stderr,"%s: INFO Group Path Edit (GPE) feature enabled\n",nco_prg_nm_get());
diff --git a/src/nco/ncecat.c b/src/nco/ncecat.c
index 0a4eb90..b7254d6 100644
--- a/src/nco/ncecat.c
+++ b/src/nco/ncecat.c
@@ -205,6 +205,7 @@ main(int argc,char **argv)
md5_sct *md5=NULL; /* [sct] MD5 configuration */
size_t bfr_sz_hnt=NC_SIZEHINT_DEFAULT; /* [B] Buffer size hint */
+ size_t cnk_csh_byt=NCO_CNK_CSH_BYT_DFL; /* [B] Chunk cache size */
size_t cnk_min_byt=NCO_CNK_SZ_MIN_BYT_DFL; /* [B] Minimize size of variable to chunk */
size_t cnk_sz_byt=0UL; /* [B] Chunk size in bytes */
size_t cnk_sz_scl=0UL; /* [nbr] Chunk size scalar */
@@ -633,7 +634,7 @@ main(int argc,char **argv)
fl_out_tmp=nco_fl_out_open(fl_out,&FORCE_APPEND,FORCE_OVERWRITE,fl_out_fmt,&bfr_sz_hnt,RAM_CREATE,RAM_OPEN,WRT_TMP_FL,&out_id);
/* Initialize chunking from user-specified inputs */
- if(fl_out_fmt == NC_FORMAT_NETCDF4 || fl_out_fmt == NC_FORMAT_NETCDF4_CLASSIC) rcd+=nco_cnk_ini(in_id,fl_out,cnk_arg,cnk_nbr,cnk_map,cnk_plc,cnk_min_byt,cnk_sz_byt,cnk_sz_scl,&cnk);
+ if(fl_out_fmt == NC_FORMAT_NETCDF4 || fl_out_fmt == NC_FORMAT_NETCDF4_CLASSIC) rcd+=nco_cnk_ini(in_id,fl_out,cnk_arg,cnk_nbr,cnk_map,cnk_plc,cnk_csh_byt,cnk_min_byt,cnk_sz_byt,cnk_sz_scl,&cnk);
/* ncecat-specific operations */
if(RECORD_AGGREGATE){
diff --git a/src/nco/ncflint.c b/src/nco/ncflint.c
index c089988..83c12c8 100644
--- a/src/nco/ncflint.c
+++ b/src/nco/ncflint.c
@@ -202,6 +202,7 @@ main(int argc,char **argv)
md5_sct *md5=NULL; /* [sct] MD5 configuration */
size_t bfr_sz_hnt=NC_SIZEHINT_DEFAULT; /* [B] Buffer size hint */
+ size_t cnk_csh_byt=NCO_CNK_CSH_BYT_DFL; /* [B] Chunk cache size */
size_t cnk_min_byt=NCO_CNK_SZ_MIN_BYT_DFL; /* [B] Minimize size of variable to chunk */
size_t cnk_sz_byt=0UL; /* [B] Chunk size in bytes */
size_t cnk_sz_scl=0UL; /* [nbr] Chunk size scalar */
@@ -657,7 +658,7 @@ main(int argc,char **argv)
fl_out_tmp=nco_fl_out_open(fl_out,&FORCE_APPEND,FORCE_OVERWRITE,fl_out_fmt,&bfr_sz_hnt,RAM_CREATE,RAM_OPEN,WRT_TMP_FL,&out_id);
/* Initialize chunking from user-specified inputs */
- if(fl_out_fmt == NC_FORMAT_NETCDF4 || fl_out_fmt == NC_FORMAT_NETCDF4_CLASSIC) rcd+=nco_cnk_ini(in_id_1,fl_out,cnk_arg,cnk_nbr,cnk_map,cnk_plc,cnk_min_byt,cnk_sz_byt,cnk_sz_scl,&cnk);
+ if(fl_out_fmt == NC_FORMAT_NETCDF4 || fl_out_fmt == NC_FORMAT_NETCDF4_CLASSIC) rcd+=nco_cnk_ini(in_id_1,fl_out,cnk_arg,cnk_nbr,cnk_map,cnk_plc,cnk_csh_byt,cnk_min_byt,cnk_sz_byt,cnk_sz_scl,&cnk);
/* Transfer variable type to table. NOTE: Using var/xtr_nbr containing all variables (processed, fixed) */
(void)nco_var_typ_trv(xtr_nbr,var,trv_tbl);
diff --git a/src/nco/ncks.c b/src/nco/ncks.c
index 2797530..261ec12 100644
--- a/src/nco/ncks.c
+++ b/src/nco/ncks.c
@@ -178,6 +178,8 @@ main(int argc,char **argv)
int *in_id_arr; /* [id] netCDF file IDs used by OpenMP code */
int JSN_ATT_FMT=0; /* [enm] JSON format for netCDF attributes: 0 (no object, only data), 1 (data only for string, char, int, and floating-point types, otherwise object), 2 (always object) */
+ nco_bool JSN_DATA_BRK=True; /* [flg] JSON format for netCDF variables: 0 (no brackets), 1 (bracket inner dimensions of multi-dimensional data) */
+
int abb_arg_nbr=0;
int att_glb_nbr;
int att_grp_nbr;
@@ -268,6 +270,7 @@ main(int argc,char **argv)
nco_dmn_dne_t *flg_dne=NULL; /* [lst] Flag to check if input dimension -d "does not exist" */
size_t bfr_sz_hnt=NC_SIZEHINT_DEFAULT; /* [B] Buffer size hint */
+ size_t cnk_csh_byt=NCO_CNK_CSH_BYT_DFL; /* [B] Chunk cache size */
size_t cnk_min_byt=NCO_CNK_SZ_MIN_BYT_DFL; /* [B] Minimize size of variable to chunk */
size_t cnk_sz_byt=0UL; /* [B] Chunk size in bytes */
size_t cnk_sz_scl=0UL; /* [nbr] Chunk size scalar */
@@ -354,6 +357,8 @@ main(int argc,char **argv)
{"buffer_size_hint",required_argument,0,0}, /* [B] Buffer size hint */
{"cnk_byt",required_argument,0,0}, /* [B] Chunk size in bytes */
{"chunk_byte",required_argument,0,0}, /* [B] Chunk size in bytes */
+ {"cnk_csh",required_argument,0,0}, /* [B] Chunk cache size in bytes */
+ {"chunk_cache",required_argument,0,0}, /* [B] Chunk cache size in bytes */
{"cnk_dmn",required_argument,0,0}, /* [nbr] Chunk size */
{"chunk_dimension",required_argument,0,0}, /* [nbr] Chunk size */
{"cnk_map",required_argument,0,0}, /* [nbr] Chunking map */
@@ -372,7 +377,10 @@ main(int argc,char **argv)
{"glb_att_add",required_argument,0,0}, /* [sng] Global attribute add */
{"hdr_pad",required_argument,0,0},
{"header_pad",required_argument,0,0},
- {"jsn_att_fmt",required_argument,0,0}, /* [enm] JSON attribute format */
+ {"jsn_fmt",required_argument,0,0}, /* [enm] JSON format */
+ {"jsn_format",required_argument,0,0}, /* [enm] JSON format */
+ {"json_fmt",required_argument,0,0}, /* [enm] JSON format */
+ {"json_format",required_argument,0,0}, /* [enm] JSON format */
{"mk_rec_dmn",required_argument,0,0}, /* [sng] Name of record dimension in output */
{"mk_rec_dim",required_argument,0,0}, /* [sng] Name of record dimension in output */
{"mta_dlm",required_argument,0,0}, /* [sng] Multi-argument delimiter */
@@ -532,6 +540,10 @@ main(int argc,char **argv)
cnk_sz_byt=strtoul(optarg,&sng_cnv_rcd,NCO_SNG_CNV_BASE10);
if(*sng_cnv_rcd) nco_sng_cnv_err(optarg,"strtoul",sng_cnv_rcd);
} /* endif cnk_byt */
+ if(!strcmp(opt_crr,"cnk_csh") || !strcmp(opt_crr,"chunk_cache")){
+ cnk_csh_byt=strtoul(optarg,&sng_cnv_rcd,NCO_SNG_CNV_BASE10);
+ if(*sng_cnv_rcd) nco_sng_cnv_err(optarg,"strtoul",sng_cnv_rcd);
+ } /* endif cnk_csh */
if(!strcmp(opt_crr,"cnk_min") || !strcmp(opt_crr,"chunk_min")){
cnk_min_byt=strtoul(optarg,&sng_cnv_rcd,NCO_SNG_CNV_BASE10);
if(*sng_cnv_rcd) nco_sng_cnv_err(optarg,"strtoul",sng_cnv_rcd);
@@ -681,7 +693,7 @@ main(int argc,char **argv)
#endif /* !ENABLE_UDUNITS */
cp=strdup(optarg);
args=nco_lst_prs_1D(cp,",",&lmt_nbr);
- nco_cln_clc_org(args[0],args[1],(lmt_nbr > 2 ? nco_cln_get_cln_typ(args[2]) : cln_nil),&crr_val);
+ nco_cln_clc_dbl_org(args[0],args[1],(lmt_nbr > 2 ? nco_cln_get_cln_typ(args[2]) : cln_nil),&crr_val);
(void)fprintf(stdout,"Units in=%s, units out=%s, difference (date) or conversion (non-date) = %f\n",args[0],args[1],crr_val);
if(cp) cp=(char *)nco_free(cp);
nco_exit(EXIT_SUCCESS);
@@ -696,10 +708,13 @@ main(int argc,char **argv)
(void)nco_vrs_prn(CVS_Id,CVS_Revision);
nco_exit(EXIT_SUCCESS);
} /* endif "vrs" */
- if(!strcmp(opt_crr,"jsn_att_fmt")){
- PRN_JSN=True; /* [flg] Print JSON */
+ if(!strcmp(opt_crr,"jsn_fmt") || !strcmp(opt_crr,"json_format") || !strcmp(opt_crr,"json_fmt") || !strcmp(opt_crr,"jsn_format")){
+ PRN_JSN=True;
JSN_ATT_FMT=(int)strtoul(optarg,&sng_cnv_rcd,NCO_SNG_CNV_BASE10);
if(*sng_cnv_rcd) nco_sng_cnv_err(optarg,"strtoul",sng_cnv_rcd);
+ if(JSN_ATT_FMT >= 4) JSN_DATA_BRK=False; /* [flg] Print JSON with bracket data */
+ JSN_ATT_FMT%=4; /* 20161221: Valid attribute-format values are 0,1,2 (3 is mapped to 2 below) */
+ if(JSN_ATT_FMT == 3) JSN_ATT_FMT=2;
} /* !jsn_att_fmt */
if(!strcmp(opt_crr,"wrt_tmp_fl") || !strcmp(opt_crr,"write_tmp_fl")) WRT_TMP_FL=True;
if(!strcmp(opt_crr,"no_tmp_fl")) WRT_TMP_FL=False;
@@ -1074,7 +1089,7 @@ main(int argc,char **argv)
fl_out_tmp=nco_fl_out_open(fl_out,&FORCE_APPEND,FORCE_OVERWRITE,fl_out_fmt,&bfr_sz_hnt,RAM_CREATE,RAM_OPEN,WRT_TMP_FL,&out_id);
/* Initialize chunking from user-specified inputs */
- if(fl_out_fmt == NC_FORMAT_NETCDF4 || fl_out_fmt == NC_FORMAT_NETCDF4_CLASSIC) rcd+=nco_cnk_ini(in_id,fl_out,cnk_arg,cnk_nbr,cnk_map,cnk_plc,cnk_min_byt,cnk_sz_byt,cnk_sz_scl,&cnk);
+ if(fl_out_fmt == NC_FORMAT_NETCDF4 || fl_out_fmt == NC_FORMAT_NETCDF4_CLASSIC) rcd+=nco_cnk_ini(in_id,fl_out,cnk_arg,cnk_nbr,cnk_map,cnk_plc,cnk_csh_byt,cnk_min_byt,cnk_sz_byt,cnk_sz_scl,&cnk);
/* Define extracted groups, variables, and attributes in output file */
CPY_GRP_METADATA=PRN_GLB_METADATA;
@@ -1194,8 +1209,12 @@ main(int argc,char **argv)
/* JSON numerical arrays have no notion of missing values */
prn_flg.PRN_MSS_VAL_BLANK=False;
prn_flg.jsn_att_fmt=JSN_ATT_FMT;
- } /* endif JSON */
-
+ prn_flg.jsn_data_brk=JSN_DATA_BRK;
+ }else { /* endif JSON */
+ prn_flg.jsn_att_fmt=0;
+ prn_flg.jsn_data_brk=False;
+ } /* !JSON */
+
if(prn_flg.xml) prn_flg.PRN_MSS_VAL_BLANK=False;
/* File summary */
diff --git a/src/nco/nco.h b/src/nco/nco.h
index 52f4bd9..b65e27b 100644
--- a/src/nco/nco.h
+++ b/src/nco/nco.h
@@ -169,6 +169,13 @@ extern "C" {
/* Argument to strtol() and strtoul() indicating base-10 conversions */
#define NCO_SNG_CNV_BASE10 10
+ /* 20161121 Chunk cache size default
+ http://www.unidata.ucar.edu/software/netcdf/docs/netcdf_perf_chunking.html
+ netCDF cache size default settable at netCDF build time with --with-chunk-cache-size option
+ If NCO default == 0, then NCO will use whatever default is built into the netCDF library
+ If NCO default > 0, then NCO will override netCDF default */
+#define NCO_CNK_CSH_BYT_DFL 0
+
/* netCDF 4.3.2 (201404) implements a configure-time constant called DEFAULT_CHUNK_SIZE = 4194304 = 4 MB
This is a good size for HPC systems with MB-scale blocksizes
Token is not in netcdf.h, and NCO's equivalent need not match netCDF's
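A minimal sketch (not NCO code) of how a nonzero chunk-cache request could be honored via the netCDF-4 call that controls the global chunk cache; the slot count and preemption below are illustrative values, not library defaults:

  #include <netcdf.h>

  static int /* O [enm] netCDF return code */
  set_cnk_csh(const size_t cnk_csh_byt) /* I [B] Requested cache size, 0 = keep library default */
  {
    if(cnk_csh_byt == 0UL) return NC_NOERR; /* Mirror the NCO_CNK_CSH_BYT_DFL convention above */
    return nc_set_chunk_cache(cnk_csh_byt,4133UL,0.75f); /* Cache size [B], slots [nbr], preemption [frc] */
  }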
@@ -329,7 +336,7 @@ extern "C" {
# define NCO_VERSION_MINOR 6
#endif /* !NCO_VERSION_MINOR */
#ifndef NCO_VERSION_PATCH
-# define NCO_VERSION_PATCH 2
+# define NCO_VERSION_PATCH 3
#endif /* !NCO_VERSION_PATCH */
#ifndef NCO_VERSION_NOTE
# define NCO_VERSION_NOTE "" /* Blank for final versions, non-blank (e.g., "beta37") for pre-release versions */
@@ -339,7 +346,7 @@ extern "C" {
# define NCO_LIB_VERSION ( NCO_VERSION_MAJOR * 100 + NCO_VERSION_MINOR * 10 + NCO_VERSION_PATCH )
#endif /* !NCO_LIB_VERSION */
#ifndef NCO_VERSION
-# define NCO_VERSION "4.6.2"
+# define NCO_VERSION "4.6.3"
#endif /* !NCO_VERSION */
/* Compatibility tokens new to netCDF4 netcdf.h: */
@@ -788,7 +795,7 @@ extern "C" {
double max_val; /* Double precision representation of maximum value of coordinate requested or implied */
double min_val; /* Double precision representation of minimum value of coordinate requested or implied */
- double origin; /* Used by ncra, ncrcat to re-base record coordinate */
+ double origin; /* Used by ncra, ncrcat to re-base record coordinate */
int id; /* Dimension ID */
int lmt_typ; /* crd_val or dmn_idx */
@@ -948,6 +955,7 @@ extern "C" {
nco_bool nwl_pst_val; /* [flg] Print newline after variable values */
int fll_pth; /* [nbr] Print full paths */
int jsn_att_fmt; /* [enm] JSON format for netCDF attributes: 0 (no object, only data), 1 (data only for string, char, int, and floating-point types, otherwise object), 2 (always object) */
+ int jsn_data_brk; /* [flg] JSON format for netCDF variables: 0 (no bracketing of var data), 1 (bracket var data) */
int nbr_zro; /* [nbr] Trailing zeros allowed after decimal point */
int ndn; /* [nbr] Indentation */
int spc_per_lvl; /* [nbr] Indentation spaces per group level */
@@ -1171,6 +1179,7 @@ extern "C" {
cnk_dmn_sct **cnk_dmn; /* [sct] User-specified per-dimension chunking information */
int cnk_map; /* [enm] Chunking map */
int cnk_plc; /* [enm] Chunking policy */
+ size_t cnk_csh_byt; /* [B] Chunk cache size */
size_t cnk_min_byt; /* [B] Minimize size of variable to chunk */
size_t cnk_sz_byt; /* [B] Chunk size in Bytes */
size_t cnk_sz_scl; /* [nbr] Chunk size scalar */
diff --git a/src/nco/nco_att_utl.c b/src/nco/nco_att_utl.c
index 8d414b6..e7420d1 100644
--- a/src/nco/nco_att_utl.c
+++ b/src/nco/nco_att_utl.c
@@ -1981,7 +1981,7 @@ nco_glb_att_add /* [fnc] Add global attributes */
20160330: answer is overwrite. otherwise, climo_nco.sh produces ANN file with, e.g.,
:climo_script = "climo_nco.shclimo_nco.shclimo_nco.sh" ;
:climo_hostname = "aerosolaerosolaerosol" ;
- :climo_version = "4.5.6-alpha054.5.6-alpha054.5.6-alpha05" ; */
+ :climo_version = "4.5.6-alpha374.5.6-alpha374.5.6-alpha37" ; */
gaa_aed.mode=aed_overwrite;
/* Write attribute to disk */
(void)nco_aed_prc(out_id,NC_GLOBAL,gaa_aed);
diff --git a/src/nco/nco_cln_utl.c b/src/nco/nco_cln_utl.c
index 0abc111..3e0ad80 100644
--- a/src/nco/nco_cln_utl.c
+++ b/src/nco/nco_cln_utl.c
@@ -132,291 +132,6 @@ nco_newdate /* [fnc] Compute date a specified number of days from input date */
return newdate_YYMMDD;
} /* end nco_newdate() */
-#ifdef ENABLE_UDUNITS
-# ifdef HAVE_UDUNITS2_H
-
-/* UDUnits2 routines */
-
-int /* [rcd] Return code */
-nco_cln_clc_dff /* [fnc] UDUnits2 Compute difference between two coordinate units */
-(const char *fl_unt_sng, /* I [ptr] units attribute string from disk */
- const char *fl_bs_sng, /* I [ptr] units attribute string from disk */
- double crr_val,
- double *og_val) /* O [] Difference between two units strings */
-{
- const char fnc_nm[]="nco_cln_clc_dff()"; /* [sng] Function name */
-
- cv_converter *ut_cnv; /* UDUnits converter */
-
- int ut_rcd; /* [enm] UDUnits2 status */
-
- ut_system *ut_sys;
- ut_unit *ut_sct_in; /* [sct] UDUnits structure, input units */
- ut_unit *ut_sct_out; /* [sct] UDUnits structure, output units */
-
- /* Quick return if units identical */
- if(!strcasecmp(fl_unt_sng,fl_bs_sng)){
- *og_val=crr_val;
- return NCO_NOERR;
- } /* end if */
-
- /* When empty, ut_read_xml() uses environment variable UDUNITS2_XML_PATH, if any
- Otherwise it uses default initial location hardcoded when library was built */
- if(nco_dbg_lvl_get() >= nco_dbg_vrb) ut_set_error_message_handler(ut_write_to_stderr); else ut_set_error_message_handler(ut_ignore);
- ut_sys=ut_read_xml(NULL);
- if(ut_sys == NULL){
- (void)fprintf(stdout,"%s: %s() failed to initialize UDUnits2 library\n",nco_prg_nm_get(),fnc_nm);
- return NCO_ERR; /* Failure */
- } /* end if err */
-
- /* Units string to convert from */
- ut_sct_in=ut_parse(ut_sys,fl_unt_sng,UT_ASCII);
- if(!ut_sct_in){ /* Problem with 'units' attribute */
- ut_rcd=ut_get_status(); /* [enm] UDUnits2 status */
- if(ut_rcd == UT_BAD_ARG) (void)fprintf(stderr,"ERROR: empty units attribute string\n");
- if(ut_rcd == UT_SYNTAX) (void)fprintf(stderr,"ERROR: units attribute \"%s\" has a syntax error\n",fl_unt_sng);
- if(ut_rcd == UT_UNKNOWN) (void)fprintf(stderr,"ERROR: units attribute \"%s\" is not listed in UDUnits2 SI system database\n",fl_unt_sng);
- return NCO_ERR; /* Failure */
- } /* endif coordinate on disk has no units attribute */
-
- /* Units string to convert to */
- ut_sct_out=ut_parse(ut_sys,fl_bs_sng,UT_ASCII);
- if(!ut_sct_out){ /* Problem with 'units' attribute */
- ut_rcd=ut_get_status(); /* [enm] UDUnits2 status */
- if(ut_rcd == UT_BAD_ARG) (void)fprintf(stderr,"ERROR: Empty units attribute string\n");
- if(ut_rcd == UT_SYNTAX) (void)fprintf(stderr,"ERROR: units attribute \"%s\" has a syntax error\n",fl_bs_sng);
- if(ut_rcd == UT_UNKNOWN) (void)fprintf(stderr,"ERROR: units attribute \"%s\" is not listed in UDUnits2 SI system database\n",fl_bs_sng);
- return NCO_ERR; /* Failure */
- } /* endif */
-
- /* Create converter */
- ut_cnv=ut_get_converter(ut_sct_in,ut_sct_out); /* UDUnits converter */
- if(!ut_cnv){
- ut_rcd=ut_get_status(); /* [enm] UDUnits2 status */
- if(ut_rcd == UT_BAD_ARG) (void)fprintf(stderr,"WARNING: One of units, %s or %s, is NULL\n",fl_bs_sng,fl_unt_sng);
- if(ut_rcd == UT_NOT_SAME_SYSTEM) (void)fprintf(stderr,"WARNING: Units %s and %s belong to different unit systems\n",fl_bs_sng,fl_unt_sng);
- if(ut_rcd == UT_MEANINGLESS) (void)fprintf(stderr,"WARNING: Conversion between user-specified unit \"%s\" and file units \"%s\" is meaningless\n",fl_bs_sng,fl_unt_sng);
- return NCO_ERR; /* Failure */
- } /* endif */
-
- /* Convert */
- *og_val=cv_convert_double(ut_cnv,crr_val);
-
- if(nco_dbg_lvl_get() >= nco_dbg_var) fprintf(stderr, "%s: INFO %s() reports conversion between systems \"%s\" and \"%s\" is %f\n",nco_prg_nm_get(),fnc_nm,fl_unt_sng,fl_bs_sng,*og_val);
-
- ut_free(ut_sct_in);
- ut_free(ut_sct_out);
- cv_free(ut_cnv);
- ut_free_system(ut_sys); /* Free memory taken by UDUnits library */
-
- return NCO_NOERR;
-} /* end UDUnits2 nco_cln_clc_dff() */
-
-int /* [rcd] Successful conversion returns NCO_NOERR */
-nco_cln_prs_tm /* UDUnits2 Extract time stamp from parsed UDUnits string */
-(const char *unt_sng, /* I [ptr] units attribute string */
- tm_cln_sct *tm_in) /* O [sct] Time structure to be populated */
-{
- const char fnc_nm[]="nco_cln_prs_tm()"; /* [sng] Function name */
-
- /* 20141230: fxm figure out a better length */
- char bfr[200];
-
- char *dt_sng;
-
- int ut_rcd; /* [enm] UDUnits2 status */
-
- ut_system *ut_sys;
- ut_unit *ut_sct_in; /* UDUnits structure, input units */
-
- /* When empty, ut_read_xml() uses environment variable UDUNITS2_XML_PATH, if any
- Otherwise it uses default initial location hardcoded when library was built */
- if(nco_dbg_lvl_get() >= nco_dbg_vrb) ut_set_error_message_handler(ut_write_to_stderr); else ut_set_error_message_handler(ut_ignore);
- ut_sys=ut_read_xml(NULL);
- if(ut_sys == NULL){
- (void)fprintf(stdout,"%s: %s failed to initialize UDUnits2 library\n",nco_prg_nm_get(),fnc_nm);
- return NCO_ERR; /* Failure */
- } /* end if err */
-
- /* Units string to convert from */
- ut_sct_in=ut_parse(ut_sys,unt_sng,UT_ASCII);
- if(ut_sct_in == NULL){ /* Problem with 'units' attribute */
- ut_rcd=ut_get_status(); /* [enm] UDUnits2 status */
- if(ut_rcd == UT_BAD_ARG) (void)fprintf(stderr,"ERROR: empty units attribute string\n");
- if(ut_rcd == UT_SYNTAX) (void)fprintf(stderr,"ERROR: units attribute \"%s\" has a syntax error\n",unt_sng);
- if(ut_rcd == UT_UNKNOWN) (void)fprintf(stderr,"ERROR: units attribute \"%s\" is not listed in UDUnits2 SI system database\n",unt_sng);
-
- return NCO_ERR; /* Failure */
- } /* endif coordinate on disk has no units attribute */
-
- /* Print timestamp to buffer in standard, dependable format */
- ut_format(ut_sct_in,bfr,sizeof(bfr),UT_ASCII|UT_NAMES);
-
- /* Extract parsed time units from print string (kludgy)
- 20141230 change to using ut_decode_time() instead? */
- dt_sng=strstr(bfr,"since");
- sscanf(dt_sng,"%*s %d-%d-%d %d:%d:%f",&tm_in->year,&tm_in->month,&tm_in->day,&tm_in->hour,&tm_in->min,&tm_in->sec);
-
- ut_free_system(ut_sys); /* Free memory taken by UDUnits library */
- ut_free(ut_sct_in);
-
- return NCO_NOERR;
-} /* end UDUnits2 nco_cln_prs_tm() */
-
-# else /* !HAVE_UDUNITS2_H */
-
-/* UDUnits1 routines */
-
-int /* [rcd] Successful conversion returns NCO_NOERR */
-nco_cln_clc_dff /* [fnc] UDUnits1 Difference between two co-ordinate units */
-(const char *fl_unt_sng, /* I [ptr] units attribute string from disk */
- const char *fl_bs_sng, /* I [ptr] units attribute string from disk */
- double crr_val,
- double *og_val) /* O [ptr] */
-{
- const char fnc_nm[]="nco_cln_clc_dff()"; /* [sng] Function name */
-
- double slp;
- double incpt;
-
- int rcd;
-
- utUnit udu_sct_in; /* UDUnits structure, input units */
- utUnit udu_sct_out; /* UDUnits structure, output units */
-
- /* Quick return if units identical */
- if(!strcmp(fl_unt_sng,fl_bs_sng) ){
- *og_val=crr_val;
- return NCO_NOERR;
- } /* endif */
-
-#ifdef UDUNITS_PATH
- /* UDUNITS_PATH macro expands to where autoconf found database file */
- rcd=utInit(UDUNITS_PATH);
-#else /* !UDUNITS_PATH */
- /* When empty, utInit() uses environment variable UDUNITS_PATH, if any
- Otherwise it uses default initial location hardcoded when library was built */
- rcd=utInit("");
-#endif /* !UDUNITS_PATH */
-
- if(rcd != UDUNITS_NOERR){
- (void)fprintf(stdout,"%s: %s failed to initialize UDUnits2 library\n",nco_prg_nm_get(),fnc_nm);
- return NCO_ERR;
- } /* end if err */
-
- /* units string to convert from */
- rcd=utScan(fl_unt_sng,&udu_sct_in);
- if(rcd != UDUNITS_NOERR){
- if(rcd == UT_EINVALID) (void)fprintf(stderr,"ERROR: units attribute \"%s\" is invalid \n",fl_unt_sng);
- if(rcd == UT_ESYNTAX) (void)fprintf(stderr,"ERROR units attribute \"%s\" contains a syntax error",fl_unt_sng);
- if(rcd == UT_EUNKNOWN) (void)fprintf(stderr,"ERROR units attribute \"%s\" is not in udunits database",fl_unt_sng);
- (void)utTerm(); /* Free memory taken by UDUnits library */
- return NCO_ERR;
- } /* endif unkown type */
-
- /* units string to convert to */
- rcd=utScan(fl_bs_sng,&udu_sct_out);
- if(rcd != UDUNITS_NOERR){
- if(rcd == UT_EINVALID) (void)fprintf(stderr,"ERROR: units attribute \"%s\" is invalid \n",fl_bs_sng);
- if(rcd == UT_ESYNTAX) (void)fprintf(stderr,"ERROR units attribute \"%s\" contains a syntax error",fl_bs_sng);
- if(rcd == UT_EUNKNOWN) (void)fprintf(stderr,"ERROR units attribute \"%s\" is not in udunits database",fl_bs_sng);
- (void)utTerm(); /* Free memory taken by UDUnits library */
- return NCO_ERR;
- } /* endif unkown type */
-
- rcd=utConvert(&udu_sct_in,&udu_sct_out,&slp,&incpt);
- if(rcd == UT_ECONVERT){
- (void)fprintf(stderr,"ERROR: user specified unit \"%s\" cannot be converted to units \"%s\"\n",fl_unt_sng,fl_bs_sng);
- (void)utTerm();
- return NCO_ERR;
- } /* endif */
-
- *og_val=crr_val*slp+incpt;
-
- /* debug stuff */
- if(nco_dbg_lvl_get() > nco_dbg_std) (void)fprintf(stderr,"%s: %s reports difference between systems \"%s\" and \"%s\" is %f\n",nco_prg_nm_get(),fnc_nm,fl_unt_sng,fl_bs_sng,*og_val);
-
- (void)utTerm();
-
- return NCO_NOERR;
-} /* end UDUnits1 nco_cln_clc_dff() */
-
-int /* [rcd] Successful conversion returns NCO_NOERR */
-nco_cln_prs_tm /* UDUnits1 Extract time stamp from a parsed udunits string */
-(const char *unt_sng, /* I [ptr] units attribute string */
- tm_cln_sct *tm_in) /* O [sct] Time structure to be populated */
-{
- const char fnc_nm[]="nco_cln_prs_tm()"; /* [sng] Function name */
-
- int rcd;
-
- utUnit udu_sct_in; /* UDUnits structure, input units */
-
-#ifdef UDUNITS_PATH
- /* UDUNITS_PATH macro expands to where autoconf found database file */
- rcd=utInit(UDUNITS_PATH);
-#else /* !UDUNITS_PATH */
- /* When empty, utInit() uses environment variable UDUNITS_PATH, if any
- Otherwise it uses default initial location hardcoded when library was built */
- rcd=utInit("");
-#endif /* !UDUNITS_PATH */
-
- if(rcd != UDUNITS_NOERR){
- (void)fprintf(stdout,"%s: %s failed to initialize UDUnits library\n",nco_prg_nm_get(),fnc_nm);
- return NCO_ERR;
- } /* end if err */
-
- /* Units string to convert from */
- rcd=utScan(unt_sng,&udu_sct_in);
- if(rcd != UDUNITS_NOERR){
- if(rcd == UT_EINVALID) (void)fprintf(stderr,"ERROR: units attribute \"%s\" is invalid \n",unt_sng);
- if(rcd == UT_ESYNTAX) (void)fprintf(stderr,"ERROR units attribute \"%s\" contains a syntax error",unt_sng);
- if(rcd == UT_EUNKNOWN) (void)fprintf(stderr,"ERROR units attribute \"%s\" is not in udunits database",unt_sng);
- (void)utTerm(); /* Free memory taken by UDUnits library */
- return NCO_ERR;
- } /* endif unkown type */
-
- /* Extract time origin */
- if(utIsTime(&udu_sct_in)){
- utCalendar(0.0,&udu_sct_in,&tm_in->year,&tm_in->month,&tm_in->day,&tm_in->hour,&tm_in->min,&tm_in->sec);
- rcd=NCO_NOERR;
- }else{
- rcd=NCO_ERR;
- } /* endelse */
-
- (void)utTerm(); /* Free memory taken by UDUnits library */
- return rcd;
-
-} /* end UDUnits1 nco_cln_prs_tm() */
-
-# endif /* !HAVE_UDUNITS2 */
-
-#else /* !ENABLE_UDUNITS */
-
-/* No UDUnits implementation available */
-
-/* Stubs to enable compilation without UDUnits */
-
-int /* [rcd] Successful conversion returns NCO_NOERR */
-nco_cln_clc_dff( /* [fnc] Difference between two co-ordinate units */
-const char *fl_unt_sng, /* I [ptr] units attribute string from disk */
-const char *fl_bs_sng, /* I [ptr] units attribute string from disk */
-double crr_val,
-double *og_val){ /* O [ptr] */
- return NCO_NOERR;
-}
-
-int /* [rcd] Successful conversion returns NCO_NOERR */
-nco_cln_prs_tm( /* Extract time stamp from a parsed UDUnits string */
-const char *unt_sng, /* I [ptr] units attribute string */
-tm_cln_sct *tm_in){ /* O [sct] struct to be populated */
- return NCO_NOERR;
-}
-
-#endif /* !ENABLE_UDUNITS */
-
-/* End UDUnits-related routines*/
-
tm_typ /* O [enm] Units type */
nco_cln_get_tm_typ /* Returns time unit type or tm_void if not found */
(const char *ud_sng){ /* I [ptr] Units string */
@@ -548,10 +263,9 @@ nco_cln_pop_val /* [fnc] Calculate value in cln_sct */
return;
} /* end nco_cln_pop_val() */
-double /* O [dbl] relative time */
-nco_cln_rel_val
-(double offset, /* I [dbl] time in base units */
- nco_cln_typ lmt_cln, /* I [enm] Calendar type */
+double /* O [dbl] Number of (base) seconds in one unit of tm_typ */
+nco_cln_val_tm_typ
+(nco_cln_typ lmt_cln, /* I [enm] Calendar type */
tm_typ bs_tm_typ) /* I [enm] Time units */
{
double *data=NULL_CEWI;
@@ -596,16 +310,354 @@ nco_cln_rel_val
break;
} /* end switch */
- return offset/scl;
-} /* end nco_cln_rel_val() */
+ return scl;
+} /* end nco_cln_val_tm_typ() */
+
+
+int /* O [flg] String is calendar date */
+nco_cln_chk_tm /* [fnc] Is string a UDUnits-compatible calendar format, e.g., "PERIOD since REFERENCE_DATE" */
+(const char *unit_sng) /* I [sng] Units string */
+{
+ /* Purpose:
+ Determine whether the string is a UDUnits-compatible calendar format, e.g., "PERIOD since REFERENCE_DATE" */
+
+ /* Does string contain date keyword? */
+ if(strcasestr(unit_sng," from ") || strcasestr(unit_sng," since ") || strcasestr(unit_sng," after ")) return True; else return False;
+
+} /* end nco_cln_chk_tm() */
+
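Illustrative calls to the keyword test above (return values noted in the comments):

  nco_cln_chk_tm("days since 2000-01-01"); /* True: string contains " since " */
  nco_cln_chk_tm("degrees_east"); /* False: no date keyword */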
+#ifndef ENABLE_UDUNITS
+/* Stub functions to compile without UDUNITS2 */
+
+int /* [flg] NCO_NOERR or NCO_ERR */
+nco_cln_clc_dbl_var_dff( /* [fnc] difference between two co-ordinate units */
+const char *fl_unt_sng, /* I [ptr] units attribute string from disk */
+const char *fl_bs_sng, /* I [ptr] units attribute string from disk */
+nco_cln_typ lmt_cln, /* I [enum] Calendar type of coordinate var */
+double *dval, /* I/O [dbl] var values modified */
+var_sct *var) /* I/O [var_sct] var values modified */
+{
+ return NCO_NOERR;
+}
+
+int /* [flg] NCO_NOERR or NCO_ERR */
+nco_cln_clc_dbl_org( /* [fnc] difference between two co-ordinate units */
+const char *val_unt_sng, /* I [ptr] input value and units in the same string */
+const char *fl_bs_sng, /* I [ptr] units attribute string from disk */
+nco_cln_typ lmt_cln, /* I [enum] Calendar type of coordinate var */
+double *og_val) /* O [dbl] output value */
+{
+ *og_val=0.0; /* CEWI */
+ return NCO_NOERR;
+}
+
+int /* [rcd] Return code */
+nco_cln_sng_rbs /* [fnc] Rebase calendar string for legibility */
+(const ptr_unn val, /* I [sct] Value to rebase */
+ const long val_idx, /* I [idx] Index into 1-D array of values */
+ const nc_type val_typ, /* I [enm] Value type */
+ const char *unit_sng, /* I [sng] Units string */
+ char *lgb_sng) /* O [sng] Legible version of input string */
+{
+ lgb_sng[0]='\0'; /* CEWI */
+ return NCO_NOERR;
+} /* end nco_cln_sng_rbs() */
+
+#endif /* !ENABLE_UDUNITS */
+
+#ifdef ENABLE_UDUNITS
+# ifdef HAVE_UDUNITS2_H
+/* UDUnits2 routines */
+cv_converter* /* UDUnits converter */
+nco_cln_cnv_mk /* [fnc] UDUnits2 create a custom converter */
+(const char *fl_unt_sng, /* I [ptr] units attribute string from disk */
+ const char *fl_bs_sng) /* I [ptr] units attribute string from disk */
+{
+ const char fnc_nm[]="nco_cln_cnv_mk"; /* [sng] Function name */
+
+ cv_converter *ut_cnv; /* UDUnits converter */
+
+ int ut_rcd; /* [enm] UDUnits2 status */
+
+ ut_system *ut_sys;
+ ut_unit *ut_sct_in; /* [sct] UDUnits structure, input units */
+ ut_unit *ut_sct_out; /* [sct] UDUnits structure, output units */
+
+ /* When empty, ut_read_xml() uses environment variable UDUNITS2_XML_PATH, if any
+ Otherwise it uses default initial location hardcoded when library was built */
+ if(nco_dbg_lvl_get() >= nco_dbg_vrb) ut_set_error_message_handler(ut_write_to_stderr); else ut_set_error_message_handler(ut_ignore);
+ ut_sys=ut_read_xml(NULL);
+ if(ut_sys == NULL){
+ (void)fprintf(stdout,"%s: %s() failed to initialize UDUnits2 library\n",nco_prg_nm_get(),fnc_nm);
+ return (cv_converter*)NULL; /* Failure */
+ } /* end if err */
+
+ /* Units string to convert from */
+ ut_sct_in=ut_parse(ut_sys,fl_unt_sng,UT_ASCII);
+ if(!ut_sct_in){ /* Problem with 'units' attribute */
+ ut_rcd=ut_get_status(); /* [enm] UDUnits2 status */
+ if(ut_rcd == UT_BAD_ARG) (void)fprintf(stderr,"ERROR: empty units attribute string\n");
+ if(ut_rcd == UT_SYNTAX) (void)fprintf(stderr,"ERROR: units attribute \"%s\" has a syntax error\n",fl_unt_sng);
+ if(ut_rcd == UT_UNKNOWN) (void)fprintf(stderr,"ERROR: units attribute \"%s\" is not listed in UDUnits2 SI system database\n",fl_unt_sng);
+ return (cv_converter*)NULL; /* Failure */
+ } /* endif coordinate on disk has no units attribute */
+
+ /* Units string to convert to */
+ ut_sct_out=ut_parse(ut_sys,fl_bs_sng,UT_ASCII);
+ if(!ut_sct_out){ /* Problem with 'units' attribute */
+ ut_rcd=ut_get_status(); /* [enm] UDUnits2 status */
+ if(ut_rcd == UT_BAD_ARG) (void)fprintf(stderr,"ERROR: Empty units attribute string\n");
+ if(ut_rcd == UT_SYNTAX) (void)fprintf(stderr,"ERROR: units attribute \"%s\" has a syntax error\n",fl_bs_sng);
+ if(ut_rcd == UT_UNKNOWN) (void)fprintf(stderr,"ERROR: units attribute \"%s\" is not listed in UDUnits2 SI system database\n",fl_bs_sng);
+ return (cv_converter*)NULL; /* Failure */
+ } /* endif */
+
+ /* Create converter */
+ ut_cnv=ut_get_converter(ut_sct_in,ut_sct_out); /* UDUnits converter */
+ if(!ut_cnv){
+ ut_rcd=ut_get_status(); /* [enm] UDUnits2 status */
+ if(ut_rcd == UT_BAD_ARG) (void)fprintf(stderr,"WARNING: One of units, %s or %s, is NULL\n",fl_bs_sng,fl_unt_sng);
+ if(ut_rcd == UT_NOT_SAME_SYSTEM) (void)fprintf(stderr,"WARNING: Units %s and %s belong to different unit systems\n",fl_bs_sng,fl_unt_sng);
+ if(ut_rcd == UT_MEANINGLESS) (void)fprintf(stderr,"WARNING: Conversion between user-specified unit \"%s\" and file units \"%s\" is meaningless\n",fl_bs_sng,fl_unt_sng);
+ return (cv_converter*)NULL; /* Failure */
+ } /* endif */
+
+ ut_free(ut_sct_in);
+ ut_free(ut_sct_out);
+ ut_free_system(ut_sys); /* Free memory taken by UDUnits library */
+ /* NB: caller is responsible for eventually calling cv_free() on the returned converter */
+
+ return ut_cnv;
+} /* end UDUnits2 nco_cln_cnv_mk() */
+
+
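An illustrative use of the converter factory above, assuming the UDUnits2 SI database resolves both unit strings; the caller must free the converter:

  cv_converter *cnv=nco_cln_cnv_mk("kilometers","meters");
  if(cnv){
    double val_out=cv_convert_double(cnv,2.5); /* Expect 2500.0 */
    (void)fprintf(stderr,"2.5 km = %g m\n",val_out);
    cv_free(cnv); /* Converter ownership rests with the caller */
  } /* !cnv */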
+int /* [flg] NCO_NOERR or NCO_ERR */
+nco_cln_clc_dbl_dff( /* [fnc] difference between two co-ordinate units */
+const char *fl_unt_sng, /* I [ptr] units attribute string from disk */
+const char *fl_bs_sng, /* I [ptr] units attribute string from disk */
+double *og_val) /* I/O [dbl] value converted in place */
+{
+
+ cv_converter *ut_cnv=NULL;
+ const char fnc_nm[]="nco_cln_clc_var_dff()"; /* [sng] Function name */
+
+
+ /* do nothing if units identical */
+ if(strcasecmp(fl_unt_sng,fl_bs_sng)==0)
+ return NCO_NOERR;
+
+ /* Convert */
+ ut_cnv=nco_cln_cnv_mk(fl_unt_sng, fl_bs_sng);
+
+ if(ut_cnv != NULL)
+ og_val[0]=cv_convert_double(ut_cnv,og_val[0]);
+ else
+ return NCO_ERR;
+
+ cv_free(ut_cnv);
+
+ return NCO_NOERR;
+}
+
+
+int /* [flg] NCO_NOERR or NCO_ERR */
+nco_cln_clc_var_dff( /* [fnc] difference between two co-ordinate units */
+const char *fl_unt_sng, /* I [ptr] units attribute string from disk */
+const char *fl_bs_sng, /* I [ptr] units attribute string from disk */
+var_sct *var) /* I/O [sct] var values modified in place */
+{
+
+ size_t sz;
+ size_t idx;
+ double *dp;
+ ptr_unn op1;
+
+ cv_converter *ut_cnv=NULL;
+ const char fnc_nm[]="nco_cln_clc_var_dff()"; /* [sng] Function name */
+
+
+ /* do nothing if units identical */
+ if(strcasecmp(fl_unt_sng,fl_bs_sng)==0)
+ return NCO_NOERR;
+
+ /* Convert */
+ ut_cnv=nco_cln_cnv_mk(fl_unt_sng, fl_bs_sng);
+
+ if(ut_cnv == NULL)
+ return NCO_ERR;
+
+ sz=var->sz;
+
+ (void)cast_void_nctype(var->type,&op1);
+ op1=var->val;
+
+ if(var->type==NC_DOUBLE)
+ {
+
+ double *dp;
+ dp=op1.dp;
+
+ if(var->has_mss_val)
+ {
+ double mss_dbl=var->mss_val.dp[0];
+ for(idx=0; idx<sz; idx++)
+ if( dp[idx] != mss_dbl)
+ dp[idx]=cv_convert_double(ut_cnv,dp[idx]);
+ }else
+ (void)cv_convert_doubles(ut_cnv,dp,sz,dp);
+
+
+ }
+
+ else if(var->type==NC_FLOAT)
+ {
+
+ float *fp;
+ fp=op1.fp;
+
+ if(var->has_mss_val)
+ {
+ float mss_fp=var->mss_val.fp[0];
+ for(idx=0; idx<sz; idx++)
+ if( fp[idx] != mss_fp)
+ fp[idx]=cv_convert_float(ut_cnv,fp[idx]);
+ }
+ else
+ (void)cv_convert_floats(ut_cnv,fp,sz,fp);
+
+
+ }
+
+
+ cv_free(ut_cnv);
+ (void)cast_nctype_void(var->type,&op1);
+
+ return NCO_NOERR;
+
+}
+
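A hedged usage sketch of the in-place variable conversion above; crd_var is a hypothetical var_sct* whose values are NC_DOUBLE or NC_FLOAT, and elements equal to the missing value are skipped by the loops above:

  /* Convert a coordinate variable from kilometers to meters in place */
  if(nco_cln_clc_var_dff("km","m",crd_var) != NCO_NOERR)
    (void)fprintf(stderr,"%s: unit conversion failed\n",nco_prg_nm_get());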
+
+int /* [flg] NCO_NOERR or NCO_ERR */
+nco_cln_clc_dbl_var_dff( /* [fnc] difference between two co-ordinate units */
+const char *fl_unt_sng, /* I [ptr] units attribute string from disk */
+const char *fl_bs_sng, /* I [ptr] units attribute string from disk */
+nco_cln_typ lmt_cln, /* I [enum] Calendar type of coordinate var */
+double *og_val, /* I/O [dbl] var values modified -can be NULL */
+var_sct *var) /* I/O [var_sct] var values modified - can be NULL */
+{
+ int rcd=NCO_NOERR; /* Default return when neither og_val nor var is supplied */
+ int is_date;
+ const char fnc_nm[]="nco_cln_clc_dbl_var_dff()"; /* [sng] Function name */
+
+ if(nco_dbg_lvl_get() >= nco_dbg_scl)
+ (void)fprintf(stderr,"%s: %s reports unt_sng=%s bs_sng=%s calendar=%d\n",nco_prg_nm_get(),fnc_nm,fl_unt_sng,fl_bs_sng,lmt_cln);
+
+
+
+ /* do nothing if units identical */
+ if(!strcasecmp(fl_unt_sng,fl_bs_sng))
+ return NCO_NOERR;
+
+
+ /* See if target units are of the form "units since date-string" */
+ is_date = nco_cln_chk_tm(fl_bs_sng);
+
+ /* use custom time functions if irregular calendar */
+ if(is_date && (lmt_cln==cln_360 || lmt_cln==cln_365) )
+ rcd=nco_cln_clc_tm( fl_unt_sng, fl_bs_sng,lmt_cln, og_val, var);
+
+ else if(og_val != (double*)NULL)
+ rcd=nco_cln_clc_dbl_dff(fl_unt_sng, fl_bs_sng,og_val);
+
+ else if(var != (var_sct*)NULL)
+ rcd=nco_cln_clc_var_dff(fl_unt_sng, fl_bs_sng,var);
+
+
+
+ return rcd;
+
+} /* end UDUnits2 nco_cln_clc_dbl_var_dff() */
+
+
+
+int /* [flg] NCO_NOERR or NCO_ERR */
+nco_cln_clc_dbl_org( /* [fnc] difference between two co-ordinate units */
+const char *val_unt_sng, /* I [ptr] input value and units in the same string */
+const char *fl_bs_sng, /* I [ptr] units attribute string from disk */
+nco_cln_typ lmt_cln, /* I [enum] Calendar type of coordinate var */
+double *og_val) /* O [dbl] output value */
+{
+ int is_date; /* set to true if date/time unit */
+ int rcd;
+ int month;
+ int year;
+
+ char lcl_unt_sng[200];
+ const char fnc_nm[]="nco_cln_dbl_org"; /* [sng] Function name */
+
+ double dval;
+
+ rcd=0;
+
+ dval=0.0;
+
+ is_date = nco_cln_chk_tm(fl_bs_sng);
+
+ lcl_unt_sng[0]='\0';
+
+ if(nco_dbg_lvl_get() >= nco_dbg_vrb)
+ (void)fprintf(stderr,"%s: INFO %s: reports unt_sng=%s bs_sng=%s calendar=%d\n",nco_prg_nm_get(),fnc_nm,val_unt_sng,fl_bs_sng,lmt_cln);
+
+
+ /* Does val_unt_sng look like a regular timestamp? */
+ if(is_date && sscanf(val_unt_sng,"%d-%d",&year,&month) == 2)
+ {
+ /* udunits requires this prefix so that it recognizes a raw datetime string */
+ strcpy(lcl_unt_sng,"s@");
+ strcat(lcl_unt_sng,val_unt_sng);
+ }
+ else
+ {
+ /* Regular conversion of val_unt_sng of form <double_value units>, e.g., '10 inches', '100 ft', '10 days since 1970-01-01' */
+ char *ptr=(char*)NULL;
+
+ dval=strtod(val_unt_sng, &ptr);
+
+ if(ptr==val_unt_sng || strlen(++ptr)<1)
+ {
+ (void)fprintf(stderr, "%s: INFO %s() reports input string must be of the form \"value unit\" got the string \"%s\"\n",nco_prg_nm_get(),fnc_nm,val_unt_sng);
+ nco_exit(EXIT_FAILURE);
+ }
+ strcpy(lcl_unt_sng,ptr);
+ }
+
+ /* use custom time functions if irregular calendar */
+ if(is_date && (lmt_cln==cln_360 || lmt_cln==cln_365) )
+ rcd=nco_cln_clc_tm( lcl_unt_sng, fl_bs_sng,lmt_cln,&dval, (var_sct*)NULL);
+ else
+ rcd=nco_cln_clc_dbl_dff(lcl_unt_sng, fl_bs_sng,&dval);
+
+
+ /* Only copy result over if conversion succeeded */
+ if(rcd==NCO_NOERR)
+ *og_val=dval;
+
+ return rcd;
+}
+
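An illustrative call of the string-plus-units entry point above for a non-calendar conversion (cln_nil denotes no special calendar, as in the ncks hunk earlier in this patch):

  double out_val;
  /* "2 km" expressed in meters; expect out_val == 2000.0 on success */
  if(nco_cln_clc_dbl_org("2 km","m",cln_nil,&out_val) == NCO_NOERR)
    (void)fprintf(stdout,"2 km = %f m\n",out_val);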
+/* This function is only called when the target units in fl_bs_sng are of the form "value unit since date-stamp"
+ and the calendar type is cln_360 or cln_365.
+ Either "var" is NULL and there is a single value to process in *og_val,
+ or var is initialized and og_val is NULL */
int /* [rcd] Successful conversion returns NCO_NOERR */
nco_cln_clc_tm /* [fnc] Difference between two coordinate units */
(const char *fl_unt_sng, /* I [ptr] Units attribute string from disk */
const char *fl_bs_sng, /* I [ptr] Units attribute string from disk */
nco_cln_typ lmt_cln, /* [enum] Calendar type of coordinate variable */
- double *og_val){ /* O [ptr] */
-
+ double *og_val, /* I/O [ptr] */
+ var_sct *var){ /* I/O [ptr] */
int rcd;
int year;
int month;
@@ -613,97 +665,161 @@ nco_cln_clc_tm /* [fnc] Difference between two coordinate units */
/* 20141230 figure out better length */
char tmp_sng[100];
double crr_val;
-
+ double scl_val;
+
+ tm_typ unt_tm_typ; /* enum for units type in fl_unt_sng */
tm_typ bs_tm_typ; /* enum for units type in fl_bs_sng */
+
tm_cln_sct unt_cln_sct;
tm_cln_sct bs_cln_sct;
- if(nco_dbg_lvl_get() >= nco_dbg_scl) (void)fprintf(stderr,"%s: nco_cln_clc_tm() reports unt_sng=%s bs_sng=%s\n",nco_prg_nm_get(),fl_unt_sng,fl_bs_sng);
+ if(nco_dbg_lvl_get() >= nco_dbg_scl)
+ (void)fprintf(stderr,"%s: nco_cln_clc_tm() reports unt_sng=%s bs_sng=%s\n",nco_prg_nm_get(),fl_unt_sng,fl_bs_sng);
+
+ /* Abort if calendar type is unsupported */
+ if(lmt_cln != cln_360 && lmt_cln != cln_365)
+ {
+ (void)fprintf(stderr,"%s: nco_cln_clc_tm() has been called with the wrong calendar type - only cln_365 and cln_360 are allowed\n",nco_prg_nm_get());
+ nco_exit(EXIT_FAILURE);
+ }
- /* Does fl_unt_sng look like a regular timestamp? */
- if(sscanf(fl_unt_sng,"%d-%d",&year,&month) == 2){
- lcl_unt_sng=(char *)nco_malloc((strlen(fl_unt_sng)+3L)*sizeof(char));
- strcpy(lcl_unt_sng,"s@");
- strcat(lcl_unt_sng,fl_unt_sng);
- }else{
- lcl_unt_sng=strdup(fl_unt_sng);
- } /* endelse */
-
- /* Temporary until we handle more calendar types */
- if(lmt_cln != cln_360 && lmt_cln != cln_365){
- rcd=nco_cln_clc_dff(lcl_unt_sng,fl_bs_sng,0.0,og_val);
- lcl_unt_sng=(char *)nco_free(lcl_unt_sng);
- return rcd;
- } /* endif */
/* Obtain units type from fl_bs_sng */
if(sscanf(fl_bs_sng,"%s",tmp_sng) != 1) return NCO_ERR;
-
- bs_tm_typ=nco_cln_get_tm_typ(tmp_sng);
+ bs_tm_typ=nco_cln_get_tm_typ(tmp_sng);
+
+ /* Obtain units type from fl_unt_sng */
+ if(sscanf(fl_unt_sng,"%s",tmp_sng) != 1) return NCO_ERR;
+ unt_tm_typ=nco_cln_get_tm_typ(tmp_sng);
+
/* Assume non-standard calendar */
- if(nco_cln_prs_tm(lcl_unt_sng,&unt_cln_sct) == NCO_ERR) return NCO_ERR;
+ if(nco_cln_prs_tm(fl_unt_sng,&unt_cln_sct) == NCO_ERR) return NCO_ERR;
if(nco_cln_prs_tm(fl_bs_sng,&bs_cln_sct) == NCO_ERR) return NCO_ERR;
unt_cln_sct.sc_typ=bs_tm_typ;
bs_cln_sct.sc_typ=bs_tm_typ;
unt_cln_sct.sc_cln=lmt_cln;
- bs_cln_sct.sc_cln=lmt_cln;
-
+ bs_cln_sct.sc_cln=lmt_cln;
(void)nco_cln_pop_val(&unt_cln_sct);
(void)nco_cln_pop_val(&bs_cln_sct);
- crr_val=nco_cln_rel_val(unt_cln_sct.value-bs_cln_sct.value,lmt_cln,bs_tm_typ);
-
- *og_val=crr_val;
+ /* Offset of input epoch from base epoch, expressed in base time units */
+ crr_val=(unt_cln_sct.value-bs_cln_sct.value) / nco_cln_val_tm_typ(lmt_cln,bs_tm_typ);
+
+ /* Scale factor between input and base time units */
+ if(unt_tm_typ == bs_tm_typ)
+ scl_val=1.0;
+ else
+ scl_val=nco_cln_val_tm_typ(lmt_cln,unt_tm_typ) / nco_cln_val_tm_typ(lmt_cln,bs_tm_typ);
+ if(og_val)
+ {
+ *og_val=*og_val*scl_val+crr_val;
+
+ }
+ else if(var)
+ {
+ size_t sz;
+ size_t idx;
+ ptr_unn op1;
+
+ sz=var->sz;
+ op1=var->val;
+ (void)cast_void_nctype(var->type,&op1);
+
+ if(var->type == NC_DOUBLE)
+ {
+ double *dp;
+ dp=op1.dp;
+
+ if(var->has_mss_val)
+ {
+ double mss_dbl=var->mss_val.dp[0];
+ for(idx=0; idx<sz; idx++)
+ if( dp[idx] != mss_dbl)
+ dp[idx]= dp[idx]*scl_val+crr_val;
+ }
+ else
+ for(idx=0; idx<sz; idx++)
+ dp[idx]= dp[idx]*scl_val+crr_val;
+ }
+
+ if(var->type == NC_FLOAT)
+ {
+ float *fp;
+ fp=op1.fp;
+
+ if(var->has_mss_val)
+ {
+ float mss_ft=var->mss_val.fp[0];
+ for(idx=0; idx<sz; idx++)
+ if( fp[idx] != mss_ft) fp[idx]=fp[idx]*scl_val+crr_val;
+ }
+ else
+ for(idx=0; idx<sz; idx++)
+ fp[idx]=fp[idx]*scl_val+crr_val;
+ }
+
+ (void)cast_nctype_void(var->type,&op1);
+
+ }
lcl_unt_sng=(char *)nco_free(lcl_unt_sng);
return NCO_NOERR;
} /* end nco_cln_clc_tm() */
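
The rewritten nco_cln_clc_tm() above reduces a calendar-aware unit change to a single scale-and-offset pass, applied either to the scalar *og_val or to every non-missing element of a var_sct. The standalone sketch below shows only that arithmetic; the unit lengths and the 31-day epoch offset are hypothetical stand-ins for what the function derives from nco_cln_val_tm_typ() and the parsed timestamps.

#include <stdio.h>

int main(void)
{
  /* Hypothetical rebase: values in "days since 2000-02-01" re-expressed in
     "hours since 2000-01-01", 365-day calendar, so the epochs differ by 31 days */
  const double mss_val=-999.0;              /* missing-value sentinel, left untouched */
  const double scl_val=86400.0/3600.0;      /* seconds per day / seconds per hour = 24 */
  const double crr_val=31.0*86400.0/3600.0; /* epoch offset expressed in target units = 744 */
  double val[4]={0.0,1.0,-999.0,2.5};       /* values in source units */

  for(int idx=0;idx<4;idx++)
    if(val[idx] != mss_val) val[idx]=val[idx]*scl_val+crr_val;

  /* Expect: 744 768 -999 804 */
  for(int idx=0;idx<4;idx++) printf("%g ",val[idx]);
  printf("\n");
  return 0;
}
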
int /* [rcd] Successful conversion returns NCO_NOERR */
-nco_cln_clc_org /* [fnc] Difference between two generic co-ordinate units */
-(const char *fl_unt_sng, /* I [ptr] Source value (optional) and source units */
- const char *fl_bs_sng, /* I [ptr] Target units */
- nco_cln_typ lmt_cln, /* [enm] Calendar type, if any, of coordinate variable */
- double *og_val) /* O [ptr] Target value in units stored on disk */
+nco_cln_prs_tm /* UDUnits2 Extract time stamp from parsed UDUnits string */
+(const char *unt_sng, /* I [ptr] units attribute string */
+ tm_cln_sct *tm_in) /* O [sct] Time structure to be populated */
{
- /* Purpose:
- Given a value expressed source units (fl_unit_sng) and target units to switch to,
- determine and return the value expressed in the target units. */
+ const char fnc_nm[]="nco_cln_prs_tm()"; /* [sng] Function name */
- int rcd;
-
- char *usr_unt_sng;
- double crr_val;
-
- /* If units contain date or timestamp call special time-conversion routine */
- if(strcasestr(fl_bs_sng," from ") || strcasestr(fl_bs_sng," since ") || strcasestr(fl_bs_sng," after ")){
- rcd=nco_cln_clc_tm(fl_unt_sng,fl_bs_sng,lmt_cln,og_val);
- return rcd;
- } /* endif */
-
- /* Regular conversion of fl_unt_sng of form <double_value units>, e.g., '10 inches', '100 ft' */
- usr_unt_sng=(char *)nco_calloc(strlen(fl_unt_sng)+1L, sizeof(char));
- sscanf(fl_unt_sng,"%lg %s",&crr_val,usr_unt_sng);
- rcd=nco_cln_clc_dff(usr_unt_sng,fl_bs_sng,crr_val,og_val);
- usr_unt_sng=(char *)nco_free(usr_unt_sng);
- return rcd;
-} /* end nco_cln_clc_org() */
+ /* 20141230: fxm figure out a better length */
+ char bfr[200];
-int /* O [flg] String is calendar date */
-nco_cln_chk_tm /* [fnc] Is string a UDUnits-compatible calendar format, e.g., "PERIOD since REFERENCE_DATE" */
-(const char *unit_sng) /* I [sng] Units string */
-{
- /* Purpose:
- Determine whether the string is a UDUnits-compatible calendar format, e.g., "PERIOD since REFERENCE_DATE" */
+ char *dt_sng;
- /* Does string contain date keyword? */
- if(strcasestr(unit_sng," from ") || strcasestr(unit_sng," since ") || strcasestr(unit_sng," after ")) return True; else return False;
+ int ut_rcd; /* [enm] UDUnits2 status */
-} /* end nco_cln_chk_tm() */
+ ut_system *ut_sys;
+ ut_unit *ut_sct_in; /* UDUnits structure, input units */
+
+ /* When passed NULL, ut_read_xml() uses the environment variable UDUNITS2_XML_PATH, if set
+ Otherwise it uses the default location hardcoded when the library was built */
+ if(nco_dbg_lvl_get() >= nco_dbg_vrb) ut_set_error_message_handler(ut_write_to_stderr); else ut_set_error_message_handler(ut_ignore);
+ ut_sys=ut_read_xml(NULL);
+ if(ut_sys == NULL){
+ (void)fprintf(stdout,"%s: %s failed to initialize UDUnits2 library\n",nco_prg_nm_get(),fnc_nm);
+ return NCO_ERR; /* Failure */
+ } /* end if err */
+
+ /* Units string to convert from */
+ ut_sct_in=ut_parse(ut_sys,unt_sng,UT_ASCII);
+ if(ut_sct_in == NULL){ /* Problem with 'units' attribute */
+ ut_rcd=ut_get_status(); /* [enm] UDUnits2 status */
+ if(ut_rcd == UT_BAD_ARG) (void)fprintf(stderr,"ERROR: empty units attribute string\n");
+ if(ut_rcd == UT_SYNTAX) (void)fprintf(stderr,"ERROR: units attribute \"%s\" has a syntax error\n",unt_sng);
+ if(ut_rcd == UT_UNKNOWN) (void)fprintf(stderr,"ERROR: units attribute \"%s\" is not listed in UDUnits2 SI system database\n",unt_sng);
+
+ return NCO_ERR; /* Failure */
+ } /* endif coordinate on disk has no units attribute */
+
+ /* Print timestamp to buffer in standard, dependable format */
+ ut_format(ut_sct_in,bfr,sizeof(bfr),UT_ASCII|UT_NAMES);
+
+ /* Extract parsed time units from print string (kludgy)
+ 20141230 change to using ut_decode_time() instead? */
+ dt_sng=strstr(bfr,"since");
+ sscanf(dt_sng,"%*s %d-%d-%d %d:%d:%f",&tm_in->year,&tm_in->month,&tm_in->day,&tm_in->hour,&tm_in->min,&tm_in->sec);
+
+ ut_free_system(ut_sys); /* Free memory taken by UDUnits library */
+ ut_free(ut_sct_in);
+
+ return NCO_NOERR;
+} /* end UDUnits2 nco_cln_prs_tm() */
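
nco_cln_prs_tm() above leans on UDUnits2 to normalize the units string, then scrapes the timestamp out of the ut_format() output. The self-contained sketch below mirrors that parse, format, scan round trip; the example units string is hypothetical, and the "since" scan inherits the same kludge the function itself flags. Link with -ludunits2.

/* cc prs_tm_sketch.c -ludunits2 */
#include <stdio.h>
#include <string.h>
#include <udunits2.h>

int main(void)
{
  char bfr[200];
  char *dt_sng;
  int year=0,month=0,day=0,hour=0,min=0;
  float sec=0.0f;

  ut_set_error_message_handler(ut_ignore);
  ut_system *ut_sys=ut_read_xml(NULL); /* NULL => UDUNITS2_XML_PATH or built-in default */
  if(!ut_sys) return 1;

  ut_unit *ut_sct_in=ut_parse(ut_sys,"days since 2000-01-01 12:30:00",UT_ASCII);
  if(!ut_sct_in){ut_free_system(ut_sys);return 1;}

  /* Re-print the unit in a dependable format, then scrape the timestamp back out */
  ut_format(ut_sct_in,bfr,sizeof(bfr),UT_ASCII|UT_NAMES);
  if((dt_sng=strstr(bfr,"since")))
    sscanf(dt_sng,"%*s %d-%d-%d %d:%d:%f",&year,&month,&day,&hour,&min,&sec);

  printf("parsed: %d-%02d-%02d %02d:%02d:%g\n",year,month,day,hour,min,sec);
  ut_free(ut_sct_in);
  ut_free_system(ut_sys);
  return 0;
}
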
int /* [rcd] Return code */
nco_cln_sng_rbs /* [fnc] Rebase calendar string for legibility */
@@ -721,8 +837,6 @@ nco_cln_sng_rbs /* [fnc] Rebase calendar string for legibility */
cdRel2Iso() from CDMS by Bob Drach, LLNL
cdParseRelunits() from CDMS by Bob Drach, LLNL */
-#ifdef HAVE_UDUNITS2_H
-
const char fnc_nm[]="nco_cln_sng_rbs()"; /* [sng] Function name */
double val_dbl; /* [day] Calendar offset converted to double */
@@ -773,7 +887,6 @@ nco_cln_sng_rbs /* [fnc] Rebase calendar string for legibility */
ut_free(ut_sct_in);
ut_free(ut_sct_out);
ut_free_system(ut_sys); /* Free memory taken by UDUnits library */
-#endif /* !HAVE_UDUNITS2 */
lgb_sng[0]='\0'; /* CEWI */
@@ -781,3 +894,7 @@ nco_cln_sng_rbs /* [fnc] Rebase calendar string for legibility */
} /* end nco_cln_sng_rbs() */
+# endif /* HAVE_UDUNITS2_H */
+#endif /* ENABLE_UDUNITS */
+/* End UDUnits-related routines */
+
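
For non-timestamp limits, the removed generic path above expects strings of the form "<double_value> <units>" and splits them with sscanf() before conversion; presumably the renamed nco_cln_clc_dbl_org() keeps that convention. A tiny standalone sketch of the split, using a made-up limit string:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

int main(void)
{
  const char *fl_unt_sng="100 ft"; /* hypothetical value-with-units limit */
  double crr_val=0.0;
  char *usr_unt_sng=(char *)calloc(strlen(fl_unt_sng)+1UL,sizeof(char));

  if(sscanf(fl_unt_sng,"%lg %s",&crr_val,usr_unt_sng) == 2)
    printf("value = %g, units = \"%s\"\n",crr_val,usr_unt_sng); /* value = 100, units = "ft" */

  free(usr_unt_sng);
  return 0;
}
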
diff --git a/src/nco/nco_cln_utl.h b/src/nco/nco_cln_utl.h
index e19ef3d..c0d97fa 100644
--- a/src/nco/nco_cln_utl.h
+++ b/src/nco/nco_cln_utl.h
@@ -83,22 +83,11 @@ nco_newdate /* [fnc] Compute date a specified number of days from input date */
(const nco_int date, /* I [YYMMDD] Date */
const nco_int day_srt); /* I [day] Days ahead of input date */
-int /* [flg] NCO_NOERR or NCO_ERR */
-nco_cln_clc_dff( /* [fnc] difference between two co-ordinate units */
-const char *fl_unt_sng, /* I [ptr] units attribute string from disk */
-const char *fl_bs_sng, /* I [ptr] units attribute string from disk */
-double crr_val, /* I [dbl] input units value */
-double *rgn_val); /* O difference between two units string */
-
-int /* [flg] NCO_NOERR or NCO_ERR */
-nco_cln_prs_tm( /* Extract time stamp from a parsed udunits string */
-const char *unt_sng, /* I [ptr] units attribute string */
-tm_cln_sct *tm_in); /* O [sct] struct to be populated */
-
tm_typ /* [enum] Units type */
nco_cln_get_tm_typ( /* returns time unit type or tm_void if not found */
const char *ud_sng); /* I [ptr] units string */
+
nco_cln_typ /* [enum] Calendar type */
nco_cln_get_cln_typ( /* [fnc] Calendar type or cln_nil if not found */
const char *ud_sng); /* I [ptr] units string */
@@ -112,29 +101,75 @@ void
nco_cln_pop_val( /* [fnc] Calculate value in cln_sct */
tm_cln_sct *cln_sct);/* I/O [ptr] Calendar structure */
-double /* O [dbl] Relative time */
-nco_cln_rel_val( /* [fnc] */
-double offset, /* I [dbl] time in base units */
+double /* O [dbl] time in (base) seconds of tm_typ */
+nco_cln_val_tm_typ( /* [fnc] */
nco_cln_typ lmt_cln, /* I [enum] Calendar type */
tm_typ bs_tm_typ); /* I [enum] Time units */
+
+int /* O [flg] String is calendar date */
+nco_cln_chk_tm /* [fnc] Is string a UDUnits-compatible calendar format, e.g., "PERIOD since REFERENCE_DATE" */
+(const char *unit_sng); /* I [sng] Units string */
+
+#ifndef ENABLE_UDUNITS
+
+int /* [flg] NCO_NOERR or NCO_ERR */
+nco_cln_clc_dbl_var_dff( /* [fnc] difference between two co-ordinate units */
+const char *fl_unt_sng, /* I [ptr] units attribute string from disk */
+const char *fl_bs_sng, /* I [ptr] units attribute string from disk */
+nco_cln_typ lmt_cln, /* I [enum] Calendar type of coordinate var */
+double *dval, /* I/O [dbl] var values modified */
+var_sct *var); /* I/O [var_sct] var values modified */
+
+int /* [flg] NCO_NOERR or NCO_ERR */
+nco_cln_clc_dbl_org( /* [fnc] difference between two co-ordinate units */
+const char *val_unt_sng, /* I [ptr] input value and units in the same string */
+const char *fl_bs_sng, /* I [ptr] units attribute string from disk */
+nco_cln_typ lmt_cln, /* I [enum] Calendar type of coordinate var */
+double *og_val); /* O [dbl] output value */
+
+#endif
+
+
+#ifdef ENABLE_UDUNITS
+#ifdef HAVE_UDUNITS2_H
+
+cv_converter* /* UDUnits converter */
+nco_cln_cnv_mk /* [fnc] UDUnits2 create a custom converter */
+(const char *fl_unt_sng, /* I [ptr] units attribute string from disk */
+ const char *fl_bs_sng);/* I [ptr] units attribute string from disk */
+
+
+int /* [flg] NCO_NOERR or NCO_ERR */
+nco_cln_clc_dbl_var_dff( /* [fnc] difference between two co-ordinate units */
+const char *fl_unt_sng, /* I [ptr] units attribute string from disk */
+const char *fl_bs_sng, /* I [ptr] units attribute string from disk */
+nco_cln_typ lmt_cln, /* I [enum] Calendar type of coordinate var */
+double *dval, /* I/O [dbl] var values modified */
+var_sct *var); /* I/O [var_sct] var values modified */
+
+int /* [flg] NCO_NOERR or NCO_ERR */
+nco_cln_clc_dbl_org( /* [fnc] difference between two co-ordinate units */
+const char *val_unt_sng, /* I [ptr] input value and units in the same string */
+const char *fl_bs_sng, /* I [ptr] units attribute string from disk */
+nco_cln_typ lmt_cln, /* I [enum] Calendar type of coordinate var */
+double *og_val); /* O [dbl] output value */
+
+
int /* O [flg] NCO_NOERR or NCO_ERR */
nco_cln_clc_tm( /* [fnc] Difference between two time coordinate units */
const char *fl_unt_sng, /* I [ptr] user units attribute string */
-const char *fl_bs_sng, /* I [ptr] units attribute string from disk */
-nco_cln_typ lmt_cln, /* [enm] Calendar type of coordinate var */
-double *rgn_val); /* O [ptr] time diff in units based on fl_bs_sng */
+const char *fl_bs_sng, /* I [ptr] units attribute string from disk */
+nco_cln_typ lmt_cln, /* [enm] Calendar type of coordinate var */
+double *rgn_val, /* I/O [ptr] time diff in units based on fl_bs_sng */
+var_sct *var); /* I/O [ptr] */
-int /* O [flg] NCO_NOERR or NCO_ERR */
-nco_cln_clc_org( /* [fnc] Difference between two generic coordinate units */
-const char *fl_unt_sng, /* I [ptr] units attribute string from disk */
-const char *fl_bs_sng, /* I [ptr] units attribute string from disk */
-nco_cln_typ lmt_cln, /* I [enum] Calendar type of coordinate var */
-double *og_val); /* O [ptr] */
-int /* O [flg] String is calendar date */
-nco_cln_chk_tm /* [fnc] Is string a UDUnits-compatible calendar format, e.g., "PERIOD since REFERENCE_DATE" */
-(const char *unit_sng); /* I [sng] Units string */
+int /* [flg] NCO_NOERR or NCO_ERR */
+nco_cln_prs_tm( /* Extract time stamp from a parsed udunits string */
+const char *unt_sng, /* I [ptr] units attribute string */
+tm_cln_sct *tm_in); /* O [sct] struct to be populated */
+
int /* [rcd] Return code */
nco_cln_sng_rbs /* [fnc] Rebase calendar string for legibility */
@@ -144,6 +179,12 @@ nco_cln_sng_rbs /* [fnc] Rebase calendar string for legibility */
const char *unit_sng, /* I [sng] Units string */
char *lgb_sng); /* O [sng] Legible version of input string */
+
+
+#endif /* !HAVE_UDUNITS2_H */
+#endif /* !ENABLE_UDUNITS */
+
+
#ifdef __cplusplus
} /* end extern "C" */
#endif /* __cplusplus */
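
The header now exports nco_cln_cnv_mk(), which returns a UDUnits2 cv_converter built from two units strings. As a rough illustration of what such a converter does (plain UDUnits2 usage, not NCO code), the sketch below converts a value between two time-unit strings; link with -ludunits2.

/* cc cnv_sketch.c -ludunits2 */
#include <stdio.h>
#include <udunits2.h>

int main(void)
{
  ut_set_error_message_handler(ut_ignore);
  ut_system *ut_sys=ut_read_xml(NULL);
  if(!ut_sys) return 1;

  ut_unit *unt_from=ut_parse(ut_sys,"days since 2000-01-01",UT_ASCII);
  ut_unit *unt_to=ut_parse(ut_sys,"hours since 1999-12-31",UT_ASCII);
  cv_converter *cnv=(unt_from && unt_to) ? ut_get_converter(unt_from,unt_to) : NULL;
  if(!cnv) return 1;

  /* 1.0 day since 2000-01-01 is 48 hours since 1999-12-31 */
  printf("%g\n",cv_convert_double(cnv,1.0));

  cv_free(cnv);
  ut_free(unt_from);
  ut_free(unt_to);
  ut_free_system(ut_sys);
  return 0;
}
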
diff --git a/src/nco/nco_cnk.c b/src/nco/nco_cnk.c
index 76ca347..929cb39 100644
--- a/src/nco/nco_cnk.c
+++ b/src/nco/nco_cnk.c
@@ -138,12 +138,16 @@ nco_cnk_ini /* [fnc] Initialize chunking from user-specified inputs */
const int cnk_nbr, /* I [nbr] Number of chunksizes specified */
const int cnk_map, /* I [enm] Chunking map */
const int cnk_plc, /* I [enm] Chunking policy */
+ const size_t cnk_csh_byt, /* I [B] Chunk cache size */
const size_t cnk_min_byt, /* I [B] Minimize size of variable to chunk */
const size_t cnk_sz_byt, /* I [B] Chunk size in bytes */
const size_t cnk_sz_scl, /* I [nbr] Chunk size scalar */
cnk_sct * const cnk) /* O [sct] Chunking structure */
{
/* Purpose: Initialize chunking from user-specified inputs */
+
+ const char fnc_nm[]="nco_cnk_ini()"; /* [sng] Function name */
+
int rcd=0; /* [enm] Return code */
size_t fl_sys_blk_sz=0UL; /* [nbr] File system blocksize for I/O */
@@ -154,6 +158,7 @@ nco_cnk_ini /* [fnc] Initialize chunking from user-specified inputs */
cnk->cnk_nbr=cnk_nbr;
cnk->cnk_map=cnk_map;
cnk->cnk_plc=cnk_plc;
+ cnk->cnk_csh_byt=cnk_csh_byt;
cnk->cnk_min_byt=cnk_min_byt;
cnk->cnk_sz_byt=cnk_sz_byt;
cnk->cnk_sz_scl=cnk_sz_scl;
@@ -186,6 +191,22 @@ nco_cnk_ini /* [fnc] Initialize chunking from user-specified inputs */
cnk->cnk_sz_byt= (fl_sys_blk_sz > 0ULL) ? fl_sys_blk_sz : NCO_CNK_SZ_BYT_DFL;
} /* end else */
+ if(cnk_csh_byt > 0ULL){
+ /* Use user-specified chunk cache size if available */
+ cnk->cnk_csh_byt=cnk_csh_byt;
+ // 20161128: Placeholder for Jerome
+ // if(nco_dbg_lvl_get() == nco_dbg_jm){
+ // nco_set_chunk_cache(cnk_csh_byt);
+ // }
+ }else{
+ /* Otherwise query and report the chunk cache settings currently used by the library */
+ size_t cnk_csh_byt_lbr; /* [B] Chunk cache size in library */
+ size_t nelemsp; /* [nbr] Chunk slots in raw data chunk cache hash table */
+ float pmp_fvr_frc; /* [frc] Preemption favor fraction */
+ nco_get_chunk_cache(&cnk_csh_byt_lbr,&nelemsp,&pmp_fvr_frc);
+ if(nco_dbg_lvl_get() >= nco_dbg_scl) (void)fprintf(stderr,"%s: INFO %s reports cnk_csh_byt_lbr = %lu, nelemsp = %lu, pmp_fvr_frc = %g\n",nco_prg_nm_get(),fnc_nm,(unsigned long)cnk_csh_byt_lbr,(unsigned long)nelemsp,pmp_fvr_frc);
+ } /* end else */
+
/* Java chunking defaults:
http://www.unidata.ucar.edu/software/thredds/current/netcdf-java/reference/netcdf4Clibrary.html */
if(cnk_min_byt == 0ULL) cnk->cnk_min_byt= (fl_sys_blk_sz > 0ULL) ? 2*fl_sys_blk_sz : NCO_CNK_SZ_MIN_BYT_DFL;
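
The new cnk_csh_byt plumbing stores a user-requested chunk cache size and, absent one, queries the current settings through nco_get_chunk_cache(). The sketch below uses the underlying netCDF-4 calls, nc_get_chunk_cache() and nc_set_chunk_cache(), which the NCO wrappers presumably wrap; link with -lnetcdf.

/* cc cnk_csh_sketch.c -lnetcdf */
#include <stdio.h>
#include <netcdf.h>

int main(void)
{
  size_t csh_byt,nelems;
  float pmp_fvr_frc;

  /* Report the library's current global chunk cache settings */
  if(nc_get_chunk_cache(&csh_byt,&nelems,&pmp_fvr_frc) != NC_NOERR) return 1;
  printf("cache = %lu B, slots = %lu, preemption = %g\n",(unsigned long)csh_byt,(unsigned long)nelems,pmp_fvr_frc);

  /* Request a 1 GB cache for files opened or created afterwards */
  if(nc_set_chunk_cache(1073741824UL,nelems,pmp_fvr_frc) != NC_NOERR) return 1;
  return 0;
}
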
diff --git a/src/nco/nco_cnk.h b/src/nco/nco_cnk.h
index 9e889bf..3a435c1 100644
--- a/src/nco/nco_cnk.h
+++ b/src/nco/nco_cnk.h
@@ -82,6 +82,7 @@ nco_cnk_ini /* [fnc] Initialize chunking from user-specified inputs */
const int cnk_nbr, /* I [nbr] Number of chunksizes specified */
const int cnk_map, /* I [enm] Chunking map */
const int cnk_plc, /* I [enm] Chunking policy */
+ const size_t cnk_csh_byt, /* I [B] Chunk cache size */
const size_t cnk_min_byt, /* I [B] Minimize size of variable to chunk */
const size_t cnk_sz_byt, /* I [B] Chunk size in bytes */
const size_t cnk_sz_scl, /* I [nbr] Chunk size scalar */
diff --git a/src/nco/nco_ctl.c b/src/nco/nco_ctl.c
index a53cbaf..90a12b3 100644
--- a/src/nco/nco_ctl.c
+++ b/src/nco/nco_ctl.c
@@ -843,7 +843,7 @@ nco_nmn_get(void) /* [fnc] Return mnemonic that describes current NCO version */
{
/* Purpose: Return mnemonic describing current NCO version
Always include terminal \n so mnemonic does not dangle */
- return "Argonauts\n";
+ return "Hip\n";
} /* end nco_nmn_get() */
char * /* O [sng] nm_in stripped of any path (i.e., program name stub) */
@@ -966,7 +966,7 @@ nco_usg_prn(void)
opt_sng=(char *)strdup("[-3] [-4] [-6] [-7] [-A] [--bfr byt] [-C] [-c] [--cnk_byt byt] [--cnk_dmn nm,lmn] [--cnk_map map] [--cnk_min byt] [--cnk_plc plc] [--cnk_scl sz] [-D nco_dbg_lvl] [-d ...] [-F] [--fix_rec_crd] [--fl_fmt fmt] [--glb ...] [-h] [--hdf] [--hdr_pad nbr] [-i var,val] [-L lvl] [-l path] [--msa] [--no_tmp_fl] [-O] [-o out.nc] [-p path] [--ppc ...] [-R] [-r] [--ram_all] [-t thr_nbr] [-v ...] [-X box] [-x] [-w wgt_1[,wgt_2]] in_1.nc in_2.nc [out.nc]\n");
break;
case ncks:
- opt_sng=(char *)strdup("[-3] [-4] [-5] [-6] [-7] [-A] [-a] [-b fl_bnr] [--bfr byt] [-C] [-c] [--cdl] [--cnk_byt byt] [--cnk_dmn nm,lmn] [--cnk_map map] [--cnk_min byt] [--cnk_plc plc] [--cnk_scl sz] [-D nco_dbg_lvl] [-d ...] [-F] [--fix_rec_dmn dim] [--fl_fmt fmt] [-G grp:lvl] [-g ...] [--glb ...] [--grp_xtr_var_xcl] [-H] [-h] [--hdn] [--hdr_pad nbr] [--jsn] [--jsn_att_fmt lvl] [-L lvl] [-l path] [-M] [-m] [--map map.nc] [--md5_dgs] [--md5_wrt] [--mk_rec_dmn dim] [--msa] [--no_blank] [...]
+ opt_sng=(char *)strdup("[-3] [-4] [-5] [-6] [-7] [-A] [-a] [-b fl_bnr] [--bfr byt] [-C] [-c] [--cdl] [--cnk_byt byt] [--cnk_dmn nm,lmn] [--cnk_map map] [--cnk_min byt] [--cnk_plc plc] [--cnk_scl sz] [-D nco_dbg_lvl] [-d ...] [-F] [--fix_rec_dmn dim] [--fl_fmt fmt] [-G grp:lvl] [-g ...] [--glb ...] [--grp_xtr_var_xcl] [-H] [-h] [--hdn] [--hdr_pad nbr] [--jsn] [--jsn_fmt lvl] [-L lvl] [-l path] [-M] [-m] [--map map.nc] [--md5_dgs] [--md5_wrt] [--mk_rec_dmn dim] [--msa] [--no_blank] [-- [...]
break;
case ncpdq:
opt_sng=(char *)strdup("[-3] [-4] [-6] [-7] [-A] [-a ...] [--bfr byt] [-C] [-c] [--cnk_byt byt] [--cnk_dmn nm,lmn] [--cnk_map map] [--cnk_min byt] [--cnk_plc plc] [--cnk_scl sz] [-D nco_dbg_lvl] [-d ...] [-F] [--fl_fmt fmt] [-G grp:lvl] [-g ...] [--glb ...] [-h] [--hdf] [--hdr_pad nbr] [-L lvl] [-l path] [-M pck_map] [--mrd] [--msa] [--no_tmp_fl] [-O] [-o out.nc] [-P pck_plc] [-p path] [--ppc ...] [-R] [-r] [--ram_all] [-t thr_nbr] [--unn] [-U] [-v ...] [-X box] [-x] in.nc [out.nc]\n");
@@ -1069,8 +1069,8 @@ nco_usg_prn(void)
if(strstr(opt_sng,"--hdr_pad")) (void)fprintf(stdout," --hdr_pad, --header_pad\tPad output header with nbr bytes\n");
if(strstr(opt_sng,"[-i var,val]")) (void)fprintf(stdout,"-i, --ntp, --interpolate var,val\tInterpolant and value\n");
if(strstr(opt_sng,"[-I]")) (void)fprintf(stdout,"-I, --wgt_msk_crd_var\tDo not weight or mask coordinate variables\n");
- if(strstr(opt_sng,"--jsn")) (void)fprintf(stdout," --jsn\t\tPrint JSON (JavaScript Object Notation)\n");
- if(strstr(opt_sng,"--jsn_att_fmt lvl")) (void)fprintf(stdout," --jsn_att_fmt lvl\tVerbosity of attribute format [0 = least verbose, 2 = most pedantic]\n");
+ if(strstr(opt_sng,"--jsn")) (void)fprintf(stdout," --jsn, --json\tPrint JSON (JavaScript Object Notation)\n");
+ if(strstr(opt_sng,"--jsn_fmt lvl")) (void)fprintf(stdout," --jsn_fmt lvl\tVerbosity of JSON format [0 = least verbose, 2 = most pedantic, add 4 to remove brackets]\n");
#ifdef ENABLE_NETCDF4
if(strstr(opt_sng,"[-L")) (void)fprintf(stdout,"-L, --dfl_lvl, --deflate lvl\tLempel-Ziv deflation/compression (lvl=0..9) for netCDF4 output\n");
#endif /* !ENABLE_NETCDF4 */
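
The --jsn_fmt help text above describes the level as additive ("add 4 to remove brackets"). The sketch below is a hypothetical decode of that convention; it only illustrates the documented semantics and is not NCO's actual option parser.

#include <stdio.h>

int main(void)
{
  const int lvl_lst[6]={0,1,2,4,5,6};
  for(int idx=0;idx<6;idx++){
    int lvl=lvl_lst[idx];
    int att_fmt=lvl & 3;     /* 0 = least verbose ... 2 = most pedantic */
    int data_brk=!(lvl & 4); /* bracketed data unless 4 was added */
    printf("--jsn_fmt %d -> attribute verbosity %d, bracketed data %s\n",lvl,att_fmt,data_brk ? "yes" : "no");
  } /* !idx */
  return 0;
}
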
diff --git a/src/nco/nco_fl_utl.c b/src/nco/nco_fl_utl.c
index 6b832c0..2b6d39d 100644
--- a/src/nco/nco_fl_utl.c
+++ b/src/nco/nco_fl_utl.c
@@ -433,7 +433,7 @@ nco_fl_lst_mk /* [fnc] Create file list from command line positional arguments *
3. \n allows entries to be separated by carriage returns */
while(((cnv_nbr=fscanf(fp_in,fmt_sng,bfr_in)) != EOF) && (fl_lst_in_lng < FL_LST_IN_MAX_LNG)){
if(cnv_nbr == 0){
- (void)fprintf(stdout,"%s: ERROR stdin input not convertable to filename. HINT: Maximum length for input filenames is %d characters. HINT: Separate filenames with whitespace. Carriage returns are automatically stripped out.\n",nco_prg_nm_get(),FL_NM_IN_MAX_LNG);
+ (void)fprintf(stdout,"%s: ERROR stdin input not convertible to filename. HINT: Maximum length for input filenames is %d characters. HINT: Separate filenames with whitespace. Carriage returns are automatically stripped out.\n",nco_prg_nm_get(),FL_NM_IN_MAX_LNG);
nco_exit(EXIT_FAILURE);
} /* endif err */
fl_nm_lng=strlen(bfr_in);
diff --git a/src/nco/nco_lmt.c b/src/nco/nco_lmt.c
index 1df8179..f043c3b 100644
--- a/src/nco/nco_lmt.c
+++ b/src/nco/nco_lmt.c
@@ -599,6 +599,10 @@ nco_lmt_evl /* [fnc] Parse user-specified limits into hyperslab specifications *
long cnt_rmn_ttl=-1L; /* Total records to be read from this and all remaining files */
long rec_skp_vld_prv_dgn=-1L; /* Records skipped at end of previous valid file, if any (diagnostic only) */
+#ifdef ENABLE_UDUNITS
+ cv_converter *ut_cnv=NULL;
+#endif /* !ENABLE_UDUNITS */
+
lmt=*lmt_ptr;
nco_prg_id=nco_prg_id_get(); /* Program ID */
@@ -745,9 +749,12 @@ nco_lmt_evl /* [fnc] Parse user-specified limits into hyperslab specifications *
cln_sng=nco_lmt_get_udu_att(grp_id,dim.cid,"calendar"); /* Calendar attribute */
if(rec_dmn_and_mfo && fl_udu_sng && lmt.rbs_sng){
+
#ifdef ENABLE_UDUNITS
- /* Re-base and reset origin to 0.0 if re-basing fails */
+ /* Re-base and reset origin to 0.0 if re-basing fails
if(nco_cln_clc_org(fl_udu_sng,lmt.rbs_sng,lmt.lmt_cln,&lmt.origin) != NCO_NOERR) lmt.origin=0.0;
+ */
+
#endif /* !ENABLE_UDUNITS */
} /* endif */
@@ -824,16 +831,16 @@ nco_lmt_evl /* [fnc] Parse user-specified limits into hyperslab specifications *
if(!fl_udu_sng){
(void)fprintf(stdout,"%s: ERROR attempting to read units attribute from variable \"%s\" \n",nco_prg_nm_get(),dim.nm);
- nco_exit(EXIT_FAILURE);
+ nco_exit(EXIT_FAILURE);
} /* end if */
if(lmt.min_sng)
- if(nco_cln_clc_org(lmt.min_sng,fl_udu_sng,lmt.lmt_cln,&lmt.min_val) != NCO_NOERR)
- nco_exit(EXIT_FAILURE);
+ if( nco_cln_clc_dbl_org(lmt.min_sng,fl_udu_sng, lmt.lmt_cln, &lmt.min_val) !=NCO_NOERR )
+ nco_exit(EXIT_FAILURE);
if(lmt.max_sng)
- if(nco_cln_clc_org(lmt.max_sng,fl_udu_sng,lmt.lmt_cln,&lmt.max_val) != NCO_NOERR)
- nco_exit(EXIT_FAILURE);
+ if( nco_cln_clc_dbl_org(lmt.max_sng,fl_udu_sng, lmt.lmt_cln, &lmt.max_val) !=NCO_NOERR )
+ nco_exit(EXIT_FAILURE);
}else{ /* end UDUnits conversion */
/* Convert user-specified limits into double precision numeric values, or supply defaults */
@@ -848,9 +855,19 @@ nco_lmt_evl /* [fnc] Parse user-specified limits into hyperslab specifications *
/* Re-base coordinates as necessary in multi-file operatators (MFOs)
lmt.origin was calculated earlier in routine */
- if(rec_dmn_and_mfo){
- if(lmt.min_sng) lmt.min_val-=lmt.origin;
- if(lmt.max_sng) lmt.max_val-=lmt.origin;
+ if(rec_dmn_and_mfo && fl_udu_sng && lmt.rbs_sng && strcmp(fl_udu_sng, lmt.rbs_sng) ){
+
+ if(lmt.min_sng)
+ if( nco_cln_clc_dbl_var_dff(lmt.rbs_sng,fl_udu_sng, lmt.lmt_cln, &lmt.min_val, (var_sct*)NULL) !=NCO_NOERR )
+ nco_exit(EXIT_FAILURE);
+
+ if(lmt.max_sng)
+ if( nco_cln_clc_dbl_var_dff(lmt.rbs_sng,fl_udu_sng, lmt.lmt_cln, &lmt.max_val, (var_sct*)NULL) !=NCO_NOERR)
+ nco_exit(EXIT_FAILURE);
+
+ if(nco_dbg_lvl_get() > nco_dbg_std)
+ fprintf(stdout,"%s: INFO nco_lmt rebasing min_val=%f max_val=%f\n",nco_prg_nm_get(), lmt.min_val, lmt.max_val);
+
} /* endif MFO */
} /* end UDUnits conversion */
@@ -1509,7 +1526,7 @@ nco_lmt_evl_dmn_crd /* [fnc] Parse user-specified limits into hypersl
if(rec_dmn_and_mfo && fl_udu_sng && lmt.rbs_sng){
#ifdef ENABLE_UDUNITS
/* Re-base and reset origin to 0.0 if re-basing fails */
- if(nco_cln_clc_org(fl_udu_sng,lmt.rbs_sng,lmt.lmt_cln,&lmt.origin) != NCO_NOERR) lmt.origin=0.0;
+ // if(nco_cln_clc_org(fl_udu_sng,lmt.rbs_sng,lmt.lmt_cln,&lmt.origin) != NCO_NOERR) lmt.origin=0.0;
#endif /* !ENABLE_UDUNITS */
} /* endif */
@@ -1588,12 +1605,13 @@ nco_lmt_evl_dmn_crd /* [fnc] Parse user-specified limits into hypersl
} /* end if */
if(lmt.min_sng)
- if(nco_cln_clc_org(lmt.min_sng,fl_udu_sng,lmt.lmt_cln,&lmt.min_val) != NCO_NOERR)
- nco_exit(EXIT_FAILURE);
+ if( nco_cln_clc_dbl_org(lmt.min_sng,fl_udu_sng, lmt.lmt_cln, &lmt.min_val) !=NCO_NOERR )
+ nco_exit(EXIT_FAILURE);
+
if(lmt.max_sng)
- if(nco_cln_clc_org(lmt.max_sng,fl_udu_sng,lmt.lmt_cln,&lmt.max_val) != NCO_NOERR)
- nco_exit(EXIT_FAILURE);
+ if( nco_cln_clc_dbl_org(lmt.max_sng,fl_udu_sng, lmt.lmt_cln, &lmt.max_val) !=NCO_NOERR )
+ nco_exit(EXIT_FAILURE);
}else{ /* end UDUnits conversion */
/* Convert user-specified limits into double precision numeric values, or supply defaults */
@@ -1608,12 +1626,29 @@ nco_lmt_evl_dmn_crd /* [fnc] Parse user-specified limits into hypersl
/* Re-base coordinates as necessary in multi-file operatators (MFOs)
lmt.origin was calculated earlier in routine */
+ /*
if(rec_dmn_and_mfo){
if(lmt.min_sng) lmt.min_val-=lmt.origin;
if(lmt.max_sng) lmt.max_val-=lmt.origin;
+ }
+ */
+ if(rec_dmn_and_mfo && fl_udu_sng && lmt.rbs_sng && strcmp(fl_udu_sng, lmt.rbs_sng) ){
+
+ if(lmt.min_sng)
+ if( nco_cln_clc_dbl_var_dff(lmt.rbs_sng,fl_udu_sng, lmt.lmt_cln, &lmt.min_val, (var_sct*)NULL) !=NCO_NOERR )
+ nco_exit(EXIT_FAILURE);
+
+ if(lmt.max_sng)
+ if( nco_cln_clc_dbl_var_dff(lmt.rbs_sng,fl_udu_sng, lmt.lmt_cln, &lmt.max_val, (var_sct*)NULL) !=NCO_NOERR)
+ nco_exit(EXIT_FAILURE);
+
+ if(nco_dbg_lvl_get() > nco_dbg_std)
+ fprintf(stdout,"%s: INFO nco_lmt rebasing min_val=%f max_val=%f\n",nco_prg_nm_get(), lmt.min_val, lmt.max_val);
+
} /* endif MFO */
} /* end UDUnits conversion */
+
/* Warn when min_val > max_val (i.e., wrapped coordinate) */
if(nco_dbg_lvl_get() > nco_dbg_std && lmt.min_val > lmt.max_val) (void)fprintf(stderr,"%s: INFO Interpreting hyperslab specifications as wrapped coordinates [%s <= %g] and [%s >= %g]\n",nco_prg_nm_get(),lmt.nm,lmt.max_val,lmt.nm,lmt.min_val);
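
The nco_lmt.c changes above re-express user limits in units-aware fashion instead of subtracting a pre-computed origin. The standalone sketch below shows why such re-basing is needed at all, using hypothetical epochs: limits stated against one file's "days since" epoch must be shifted before they can be compared with a later file's coordinate values.

#include <stdio.h>

int main(void)
{
  /* File 1 (re-base units): "days since 2000-01-01"
     File 2 (on-disk units): "days since 2000-02-01", i.e., 31 days later */
  const double dff_day=31.0;        /* offset between the two epochs, in days */
  double min_val=40.0,max_val=45.0; /* user limits interpreted against file 1 */

  /* Same instants expressed relative to file 2's epoch */
  min_val-=dff_day;
  max_val-=dff_day;

  printf("limits in file-2 units: %g to %g\n",min_val,max_val); /* 9 to 14 */
  return 0;
}
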
diff --git a/src/nco/nco_netcdf.c b/src/nco/nco_netcdf.c
index fc73f5f..ebce6ed 100644
--- a/src/nco/nco_netcdf.c
+++ b/src/nco/nco_netcdf.c
@@ -76,10 +76,13 @@ nco_err_exit /* [fnc] Print netCDF error message, routine name, then exit */
#endif /* !NCO_ABORT_ON_ERROR */
switch(rcd){
+ case NC_EACCESS: /* netcdf.h added NC_EACCESS in ~2012 */
+ (void)fprintf(stdout,"ERROR NC_EACCESS Access failure\nHINT: NC_EACCESS errors signify a problem receiving data from a DAP server. This can occur, e.g., when NCO requests (with nco_var_get()) more data than the server is configured to dispense at one time. A workaround might be to request smaller chunks of data at one time. This can be accomplished by accessing hyperslabs or multi-slabs of data as described at http://nco.sf.net/nco.html#mlt\n"); break; /* NB: NC_EACCESS added to netc [...]
case NC_EBADTYPE: (void)fprintf(stdout,"ERROR NC_BADTYPE Not a netCDF data type\nHINT: NC_EBADTYPE errors can occur when NCO tries to write netCDF4 features to a netCDF3 file. Features that cannot be defined in a netCDF3 file, and that thus will trigger this error, include groups and netCDF4 atomic types (e.g., NC_STRING, NC_UBYTE). The workaround is to remove all netCDF4 features before attempting the conversion, or to just give up and write a netCDF4 output file instead of a netCDF3 [...]
case NC_ECANTWRITE: (void)fprintf(stdout,"ERROR NC_ECANTWRITE Can't write file\nHINT: NC_ECANTWRITE errors can occur when NCO tries to write to an HDF5 file that is not netCDF4-compliant. One workaround is translate the file to a netCDF4-compliant file first, e.g., with \'ncks in.h5 out.nc\'.\n"); break;
case NC_EINVAL: (void)fprintf(stdout,"ERROR NC_EINVAL Invalid argument\nHINT: NC_EINVAL errors can occur for many reasons. Three common ones are described here. 1. When NCO operators attempt to open netCDF4 files using the diskless option, usually invoked with --diskless_all, --ram_all, or --open_ram. Is your input file netCDF4 format? (http://nco.sf.net/nco.html#fmt_inq shows how to tell.) If so then omitting the diskless option may solve this problem. 2. When HDF4-enabled NCO attem [...]
- case NC_EIO: (void)fprintf(stdout,"ERROR NC_EIO Generic IO error\nHINT: NC_EIO errors can occur when NCO tries to read a file through a non-existant DAP location. Then NCO automatically tries to retrieve the file through another method, e.g., searching for a file of the same name in the current directory. This can trigger a subsequent error. When debugging the problem, first address the originating error (from NC_EIO).\n"); break;
+ case NC_EIO: /* netcdf.h added NC_EIO in ~2012 */
+ (void)fprintf(stdout,"ERROR NC_EIO Generic IO error\nHINT: NC_EIO errors can occur when NCO tries to read a file through a non-existant DAP location. Then NCO automatically tries to retrieve the file through another method, e.g., searching for a file of the same name in the current directory. This can trigger a subsequent error. When debugging the problem, first address the originating error (from NC_EIO).\n"); break;
case NC_ELATEFILL: /* netcdf.h replaced NC_EFILLVALUE by NC_ELATEFILL after about netCDF ~4.2.1 */
(void)fprintf(stdout,"ERROR NC_ELATEFILL (formerly NC_EFILLVALUE) Attempt to define fill value when data already exists\nHINT: NC_ELATEFILL errors can occur when ncap2 attempts to define a variable with a _FillValue attribute in a netCDF4 file. We believe this is an NCO bug (fxm TODO nco1089) and are working to fix it. Does your output file need to be netCDF4 or netCDF4_classic format? If so, then wait for us to fix the bug. If not, change the output format to netCDF3 (e.g., with - [...]
#ifdef ENABLE_NETCDF4
diff --git a/src/nco/nco_netcdf.h b/src/nco/nco_netcdf.h
index fbc1139..5c19190 100644
--- a/src/nco/nco_netcdf.h
+++ b/src/nco/nco_netcdf.h
@@ -164,6 +164,9 @@
#ifndef NC_EIO
# define NC_EIO (-68) /**< Generic IO error */
#endif
+#ifndef NC_EACCESS
+# define NC_EACCESS (-77) /**< Access Failure */
+#endif
/* Some netCDF3 stubs for netCDF4 routines need netCDF4-only return codes
These netCDF4 tokens are never defined in netCDF3-only netcdf.h */
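
NC_EACCESS appears only in newer netcdf.h, so the header guards the token and the error switch in nco_err_exit() can always reference it. Below is a minimal standalone sketch of the same guard-then-report pattern; the hint text is illustrative only. Link with -lnetcdf.

/* cc eacces_sketch.c -lnetcdf */
#include <stdio.h>
#include <netcdf.h>

#ifndef NC_EACCESS
# define NC_EACCESS (-77) /* Access failure: token absent from older netcdf.h */
#endif

int main(void)
{
  int rcd=NC_EACCESS; /* pretend a DAP request just failed */
  switch(rcd){
  case NC_EACCESS:
    (void)fprintf(stdout,"%s\nHINT: often a DAP-server access problem; try requesting smaller hyperslabs\n",nc_strerror(rcd));
    break;
  default:
    (void)fprintf(stdout,"%s\n",nc_strerror(rcd));
    break;
  } /* !rcd */
  return 0;
}
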
diff --git a/src/nco/nco_pck.c b/src/nco/nco_pck.c
index 47eeaef..01982b7 100644
--- a/src/nco/nco_pck.c
+++ b/src/nco/nco_pck.c
@@ -16,7 +16,7 @@
nco_var_pck(): called just before writing output file, e.g., in main()
Bookkeeping hassle is keeping flags in var_prc synchronized with flags in var_prc_out
- From netCDF User's Guide:
+ From netCDF Users Guide:
scale_factor: If present for a variable, the data are to be multiplied by this factor after the data are read by the application that accesses the data
add_offset: If present for a variable, this number is added to the data after the data are read by the application. If both scale_factor and add_offset attributes are present, the data are first scaled before the offset is added.
When scale_factor and add_offset are used for packing, the associated variable (containing the packed data) is typically of type byte or short, whereas the unpacked values are intended to be of type float or double. Attribute's scale_factor and add_offset should both be of type intended for the unpacked data, e.g., float or double.
diff --git a/src/nco/nco_prn.c b/src/nco/nco_prn.c
index 3713b2c..cde09cc 100644
--- a/src/nco/nco_prn.c
+++ b/src/nco/nco_prn.c
@@ -526,7 +526,14 @@ nco_prn_att /* [fnc] Print all attributes of single variable or group */
default: nco_dfl_case_nc_type_err();
break;
} /* end switch */
- if(CDL) (void)fprintf(stdout," ;\n");
+ if(CDL){
+ if(nco_dbg_lvl_get() >= nco_dbg_std){
+ /* 20161129: Add netCDF attribute type as comment after semi-colon. Yes, "string" is redundant. */
+ (void)fprintf(stdout," ; // %s\n",cdl_typ_nm(att[idx].type));
+ }else{ /* !dbg */
+ (void)fprintf(stdout," ;\n");
+ } /* !dbg */
+ } /* !CDL */
if(TRD) (void)fprintf(stdout,"\n");
if(XML) (void)fprintf(stdout,"\" />\n");
if(JSN){
@@ -1509,6 +1516,7 @@ nco_prn_var_val_trv /* [fnc] Print variable data (GTT version) */
long *dmn_sbs_dsk=NULL_CEWI; /* [nbr] Indices of hyperslab relative to original on disk */
long *dmn_sbs_ram=NULL_CEWI; /* [nbr] Indices in hyperslab */
long *mod_map_cnt=NULL_CEWI; /* [nbr] MSA modulo array */
+ long *mod_map_rv_cnt=NULL_CEWI; /* [nbr] MSA modulo array reverse multiply */
long *mod_map_in=NULL_CEWI; /* [nbr] MSA modulo array */
long lmn; /* [nbr] Index to print variable data */
long sng_lng=long_CEWI; /* [nbr] Length of NC_CHAR string */
@@ -1520,10 +1528,12 @@ nco_prn_var_val_trv /* [fnc] Print variable data (GTT version) */
const nco_bool XML=prn_flg->xml; /* [flg] XML output */
const nco_bool TRD=prn_flg->trd; /* [flg] Traditional output */
const nco_bool JSN=prn_flg->jsn; /* [flg] JSON output */
+
const nco_bool CDL_OR_JSN=prn_flg->cdl || prn_flg->jsn; /* [flg] CDL or JSON output */
const nco_bool CDL_OR_TRD=prn_flg->cdl || prn_flg->trd; /* [flg] CDL or Traditional output */
const nco_bool CDL_OR_JSN_OR_XML=prn_flg->cdl || prn_flg->jsn || prn_flg->xml; /* [flg] CDL or JSON or XML output */
+ nco_bool JSN_BRK=False; /* [flg] JSON output - data bracketed */
nco_bool is_mss_val=False; /* [flg] Current value is missing value */
nco_bool flg_malloc_unit_crd=False; /* [flg] Allocated memory for coordinate units string */
nco_bool flg_malloc_unit_var=False; /* [flg] Allocated memory for variable units string */
@@ -1580,6 +1590,38 @@ nco_prn_var_val_trv /* [fnc] Print variable data (GTT version) */
var.val.vp=nco_msa_rcr_clc((int)0,var.nbr_dim,lmt,lmt_msa,&var);
} /* ! Scalars */
+
+ if(var.nbr_dim)
+ {
+ /* Allocate space for dimension information */
+ dim=(dmn_sct *)nco_malloc(var.nbr_dim*sizeof(dmn_sct));
+ /* Ensure val.vp is NULL-initialized (and thus not inadvertently free'd) when PRN_DMN_IDX_CRD_VAL is False */
+ for(int idx=0;idx<var.nbr_dim;idx++) dim[idx].val.vp=NULL;
+ dmn_sbs_ram=(long *)nco_malloc(var.nbr_dim*sizeof(long));
+ dmn_sbs_dsk=(long *)nco_malloc(var.nbr_dim*sizeof(long));
+ mod_map_cnt=(long *)nco_malloc(var.nbr_dim*sizeof(long));
+ mod_map_rv_cnt=(long *)nco_malloc(var.nbr_dim*sizeof(long));
+ mod_map_in=(long *)nco_malloc(var.nbr_dim*sizeof(long));
+
+ /* Create mod_map_in */
+ for(int idx=0;idx<var.nbr_dim;idx++) mod_map_in[idx]=1L;
+ for(int idx=0;idx<var.nbr_dim;idx++)
+ for(int jdx=idx+1;jdx<var.nbr_dim;jdx++)
+ mod_map_in[idx]*=lmt_msa[jdx]->dmn_sz_org;
+
+ /* Create mod_map_cnt */
+ for(int idx=0;idx<var.nbr_dim;idx++) mod_map_cnt[idx]=1L;
+ for(int idx=0;idx<var.nbr_dim;idx++)
+ for(int jdx=idx;jdx<var.nbr_dim;jdx++)
+ mod_map_cnt[idx]*=lmt_msa[jdx]->dmn_cnt;
+
+ /* Create mod_map_rv_cnt (reverse cumulative dimension counts) */
+ long rsz=1L;
+ for(int jdx=var.nbr_dim-1;jdx>=0;jdx--)
+ mod_map_rv_cnt[jdx]=rsz*=lmt_msa[jdx]->dmn_cnt;
+
+ }
+
/* Call also initializes var.sz with final size */
if(prn_flg->md5)
if(prn_flg->md5->dgs) (void)nco_md5_chk(prn_flg->md5,var_nm,var.sz*nco_typ_lng(var.type),grp_id,(long *)NULL,(long *)NULL,var.val.vp);
@@ -1669,6 +1711,8 @@ nco_prn_var_val_trv /* [fnc] Print variable data (GTT version) */
(void)sprintf(fmt_sng,"%s",nco_typ_fmt_sng_att_xml(var.type));
/* If var is size=1 (scalar?) then no array brackets */
if(var.sz == 1) (void)fprintf(stdout,"%*s\"data\": ",prn_ndn,spc_sng); else (void)fprintf(stdout,"%*s\"data\": [",prn_ndn,spc_sng);
+ /* Use bracketed (nested-array) JSON data output when requested for multi-dimensional variables */
+ if(prn_flg->jsn_data_brk && var.nbr_dim >= 2) JSN_BRK=True;
} /* !JSN */
nm_cdl=nm2sng_cdl(var_nm);
@@ -1732,8 +1776,14 @@ nco_prn_var_val_trv /* [fnc] Print variable data (GTT version) */
for(lmn=0;lmn<var.sz;lmn++){
+ /* Emit opening brackets for nested-array JSON output */
+ if(JSN_BRK)
+ for(int bdz=var.nbr_dim-1;bdz>=1;bdz--)
+ if(lmn % mod_map_rv_cnt[bdz] == 0)
+ (void)fprintf(stdout,"[");
+
/* memcmp() triggers pedantic warning unless pointer arithmetic is cast to type char * */
- if(prn_flg->PRN_MSS_VAL_BLANK) is_mss_val = var.has_mss_val ? !memcmp((char *)var.val.vp+lmn*val_sz_byt,var.mss_val.vp,(size_t)val_sz_byt) : False;
+ if(prn_flg->PRN_MSS_VAL_BLANK) is_mss_val = var.has_mss_val ? !memcmp((char *)var.val.vp+lmn*val_sz_byt,var.mss_val.vp,(size_t)val_sz_byt) : False;
if(prn_flg->PRN_MSS_VAL_BLANK && is_mss_val){
(void)sprintf(val_sng,"%s",mss_val_sng);
@@ -1784,8 +1834,8 @@ nco_prn_var_val_trv /* [fnc] Print variable data (GTT version) */
if(chr_val == '\n' && lmn != var_szm1) (void)sprintf(sng_val_sng,"%s\",\n%*s\"",sng_val_sng_cpy,prn_ndn+prn_flg->var_fst,spc_sng);
if(lmn%sng_lng == sng_lngm1){
(void)fprintf(stdout,"%s%s",sng_val_sng,(CDL||JSN) ? "\"" : "");
- /* Print separator after non-final string */
- if(lmn != var_szm1) (void)fprintf(stdout,"%s",spr_sng);
+ /* Print separator after non-final string
+ //if(lmn != var_szm1) (void)fprintf(stdout,"%s",spr_sng); */
} /* endif string end */
if(lmn == var_szm1) sng_val_sng=(char *)nco_free(sng_val_sng);
} /* var.nbr_dim > 0 */
@@ -1810,14 +1860,29 @@ nco_prn_var_val_trv /* [fnc] Print variable data (GTT version) */
(void)strcat(sng_val_sng,(*chr2sng_sf)(chr_val,val_sng));
} /* end loop over character */
(void)fprintf(stdout,"%s%s",sng_val_sng,(XML) ? "" : "\"");
- /* Print separator after non-final string */
- if(lmn != var_szm1) (void)fprintf(stdout,"%s",spr_sng);
+ /* Print separator after non-final string; NB: with bracketed JSON output no comma is printed here
+ if(lmn != var_szm1) (void)fprintf(stdout,"%s",spr_sng); */
sng_val_sng=(char *)nco_free(sng_val_sng);
break;
default: nco_dfl_case_nc_type_err(); break;
} /* end switch */
} /* !is_mss_val */
- if(var.type != NC_CHAR && var.type != NC_STRING) (void)fprintf(stdout,"%s%s",val_sng,(lmn != var_szm1) ? spr_sng : "");
+
+ if(var.type != NC_CHAR && var.type != NC_STRING) (void)fprintf(stdout,"%s",val_sng);
+
+ /* Emit closing brackets for nested-array JSON output */
+ if(JSN_BRK)
+ for(int bdz=var.nbr_dim-1;bdz>=1;bdz--)
+ if((lmn+1) % mod_map_rv_cnt[bdz] == 0)
+ (void)fprintf(stdout,"]");
+
+
+ if( lmn != var_szm1 )
+ if( (var.type == NC_CHAR && lmn%sng_lng == sng_lngm1) || var.type != NC_CHAR )
+ (void)fprintf(stdout,"%s", spr_sng );
+
+ /* if(var.type != NC_CHAR && var.type != NC_STRING ) (void)fprintf(stdout,"%s%s",val_sng,(lmn != var_szm1) ? spr_sng : ""); */
+
} /* end loop over element */
rcd_prn+=0; /* CEWI */
if(CDL) (void)fprintf(stdout," ;\n");
@@ -1919,27 +1984,26 @@ nco_prn_var_val_trv /* [fnc] Print variable data (GTT version) */
if(var.nbr_dim > 0 && !dlm_sng && TRD){
- /* Allocate space for dimension information */
+ /*
dim=(dmn_sct *)nco_malloc(var.nbr_dim*sizeof(dmn_sct));
- /* Ensure val.vp is NULL-initialized (and thus not inadvertently free'd) when PRN_DMN_IDX_CRD_VAL is False */
for(int idx=0;idx<var.nbr_dim;idx++) dim[idx].val.vp=NULL;
dmn_sbs_ram=(long *)nco_malloc(var.nbr_dim*sizeof(long));
dmn_sbs_dsk=(long *)nco_malloc(var.nbr_dim*sizeof(long));
mod_map_cnt=(long *)nco_malloc(var.nbr_dim*sizeof(long));
mod_map_in=(long *)nco_malloc(var.nbr_dim*sizeof(long));
- /* Create mod_map_in */
for(int idx=0;idx<var.nbr_dim;idx++) mod_map_in[idx]=1L;
for(int idx=0;idx<var.nbr_dim;idx++)
for(int jdx=idx+1;jdx<var.nbr_dim;jdx++)
mod_map_in[idx]*=lmt_msa[jdx]->dmn_sz_org;
- /* Create mod_map_cnt */
for(int idx=0;idx<var.nbr_dim;idx++) mod_map_cnt[idx]=1L;
for(int idx=0;idx<var.nbr_dim;idx++)
for(int jdx=idx;jdx<var.nbr_dim;jdx++)
mod_map_cnt[idx]*=lmt_msa[jdx]->dmn_cnt;
+ */
+
/* Read coordinate dimensions if required */
if(prn_flg->PRN_DMN_IDX_CRD_VAL){
@@ -2225,6 +2289,7 @@ lbl_chr_prn:
if(dmn_sbs_ram) dmn_sbs_ram=(long *)nco_free(dmn_sbs_ram);
if(dmn_sbs_dsk) dmn_sbs_dsk=(long *)nco_free(dmn_sbs_dsk);
if(mod_map_cnt) mod_map_cnt=(long *)nco_free(mod_map_cnt);
+ if(mod_map_rv_cnt) mod_map_rv_cnt=(long *)nco_free(mod_map_rv_cnt);
if(mod_map_in) mod_map_in=(long *)nco_free(mod_map_in);
} /* end if variable has more than one dimension */
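
The nco_prn.c changes above drive nested JSON arrays from mod_map_rv_cnt, the reverse cumulative product of dimension counts: an opening bracket is printed whenever lmn is a multiple of a trailing count, a closing bracket whenever lmn+1 is. The standalone sketch below reproduces that loop for a hypothetical 2x3 variable and prints [[0,1,2],[3,4,5]].

#include <stdio.h>

int main(void)
{
  const int nbr_dim=2;
  const long dmn_cnt[2]={2L,3L};
  const long var_sz=6L;
  long mod_map_rv_cnt[2];

  /* Reverse cumulative product: {6,3} for a 2x3 variable */
  long rsz=1L;
  for(int jdx=nbr_dim-1;jdx>=0;jdx--) mod_map_rv_cnt[jdx]=rsz*=dmn_cnt[jdx];

  (void)fprintf(stdout,"[");
  for(long lmn=0;lmn<var_sz;lmn++){
    for(int bdz=nbr_dim-1;bdz>=1;bdz--)
      if(lmn % mod_map_rv_cnt[bdz] == 0) (void)fprintf(stdout,"[");
    (void)fprintf(stdout,"%ld",lmn); /* stand-in for the real value */
    for(int bdz=nbr_dim-1;bdz>=1;bdz--)
      if((lmn+1) % mod_map_rv_cnt[bdz] == 0) (void)fprintf(stdout,"]");
    if(lmn != var_sz-1L) (void)fprintf(stdout,",");
  } /* !lmn */
  (void)fprintf(stdout,"]\n");
  return 0;
}
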
diff --git a/src/nco/nco_sng_utl.h b/src/nco/nco_sng_utl.h
index f7d6301..2336c8d 100644
--- a/src/nco/nco_sng_utl.h
+++ b/src/nco/nco_sng_utl.h
@@ -68,6 +68,13 @@ extern "C" {
const size_t chr_nbr); /* I [nbr] Compare at most chr_nbr characters */
#endif /* !NEED_STRCASECMP */
+ /* 20161205 GNU gcc (since 4.7.3) provides strcasestr() as a non-standard extension iff _GNU_SOURCE is defined */
+#if 0
+#ifdef __GNUC__
+# define _GNU_SOURCE
+#endif /* __GNUC__ */
+#endif
+
/* 20130827 GNU g++ always provides strcasestr(), MSVC never does */
#ifndef __GNUG__
# ifdef NEED_STRCASESTR
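
The disabled block above points at the usual pitfall: glibc only declares strcasestr() when _GNU_SOURCE is defined before the first system header is included, so defining it inside nco_sng_utl.h, after other headers, is too late. Minimal sketch, assuming a glibc build; the units string is hypothetical.

/* Minimal sketch: _GNU_SOURCE must precede the first #include for glibc to declare strcasestr() */
#define _GNU_SOURCE
#include <stdio.h>
#include <string.h>

int main(void)
{
  const char *unit_sng="Days SINCE 2000-01-01";
  if(strcasestr(unit_sng," since ")) printf("timestamp units detected\n");
  return 0;
}
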
diff --git a/src/nco/ncpdq.c b/src/nco/ncpdq.c
index bfb61a2..7a92c6b 100644
--- a/src/nco/ncpdq.c
+++ b/src/nco/ncpdq.c
@@ -204,6 +204,7 @@ main(int argc,char **argv)
nco_bool flg_dmn_prc_usr_spc=False; /* [flg] Processed dimensions specified on command line */
size_t bfr_sz_hnt=NC_SIZEHINT_DEFAULT; /* [B] Buffer size hint */
+ size_t cnk_csh_byt=NCO_CNK_CSH_BYT_DFL; /* [B] Chunk cache size */
size_t cnk_min_byt=NCO_CNK_SZ_MIN_BYT_DFL; /* [B] Minimize size of variable to chunk */
size_t cnk_sz_byt=0UL; /* [B] Chunk size in bytes */
size_t cnk_sz_scl=0UL; /* [nbr] Chunk size scalar */
@@ -662,7 +663,7 @@ main(int argc,char **argv)
fl_out_tmp=nco_fl_out_open(fl_out,&FORCE_APPEND,FORCE_OVERWRITE,fl_out_fmt,&bfr_sz_hnt,RAM_CREATE,RAM_OPEN,WRT_TMP_FL,&out_id);
/* Initialize chunking from user-specified inputs */
- if(fl_out_fmt == NC_FORMAT_NETCDF4 || fl_out_fmt == NC_FORMAT_NETCDF4_CLASSIC) rcd+=nco_cnk_ini(in_id,fl_out,cnk_arg,cnk_nbr,cnk_map,cnk_plc,cnk_min_byt,cnk_sz_byt,cnk_sz_scl,&cnk);
+ if(fl_out_fmt == NC_FORMAT_NETCDF4 || fl_out_fmt == NC_FORMAT_NETCDF4_CLASSIC) rcd+=nco_cnk_ini(in_id,fl_out,cnk_arg,cnk_nbr,cnk_map,cnk_plc,cnk_csh_byt,cnk_min_byt,cnk_sz_byt,cnk_sz_scl,&cnk);
if(IS_REORDER){
diff --git a/src/nco/ncra.c b/src/nco/ncra.c
index 7c2c93a..369a3b5 100644
--- a/src/nco/ncra.c
+++ b/src/nco/ncra.c
@@ -279,6 +279,7 @@ main(int argc,char **argv)
scv_sct wgt_avg_scv;
size_t bfr_sz_hnt=NC_SIZEHINT_DEFAULT; /* [B] Buffer size hint */
+ size_t cnk_csh_byt=NCO_CNK_CSH_BYT_DFL; /* [B] Chunk cache size */
size_t cnk_min_byt=NCO_CNK_SZ_MIN_BYT_DFL; /* [B] Minimize size of variable to chunk */
size_t cnk_sz_byt=0UL; /* [B] Chunk size in bytes */
size_t cnk_sz_scl=0UL; /* [nbr] Chunk size scalar */
@@ -814,7 +815,7 @@ main(int argc,char **argv)
fl_out_tmp=nco_fl_out_open(fl_out,&FORCE_APPEND,FORCE_OVERWRITE,fl_out_fmt,&bfr_sz_hnt,RAM_CREATE,RAM_OPEN,WRT_TMP_FL,&out_id);
/* Initialize chunking from user-specified inputs */
- if(fl_out_fmt == NC_FORMAT_NETCDF4 || fl_out_fmt == NC_FORMAT_NETCDF4_CLASSIC) rcd+=nco_cnk_ini(in_id,fl_out,cnk_arg,cnk_nbr,cnk_map,cnk_plc,cnk_min_byt,cnk_sz_byt,cnk_sz_scl,&cnk);
+ if(fl_out_fmt == NC_FORMAT_NETCDF4 || fl_out_fmt == NC_FORMAT_NETCDF4_CLASSIC) rcd+=nco_cnk_ini(in_id,fl_out,cnk_arg,cnk_nbr,cnk_map,cnk_plc,cnk_csh_byt,cnk_min_byt,cnk_sz_byt,cnk_sz_scl,&cnk);
/* Define dimensions, extracted groups, variables, and attributes in output file */
(void)nco_xtr_dfn(in_id,out_id,&cnk,dfl_lvl,gpe,md5,!FORCE_APPEND,True,False,nco_pck_plc_nil,(char *)NULL,trv_tbl);
@@ -907,6 +908,7 @@ main(int argc,char **argv)
}else{ /* !tm_crd_id_in */
if(nco_dbg_lvl >= nco_dbg_std) (void)fprintf(stderr,"%s: WARNING Climatology bounds invoked on dataset with unknown time coordinate. Turning-off climatology bounds mode.\n",nco_prg_nm_get());
flg_cb=False;
+ rcd=NC_NOERR;
goto skp_cb;
} /* !tm_crd_in */
@@ -1270,18 +1272,18 @@ main(int argc,char **argv)
if(nco_prg_id == ncra) FLG_BFR_NRM=True; /* [flg] Current output buffers need normalization */
/* Re-base record coordinate and bounds if necessary (e.g., time, time_bnds) */
- if(lmt_rec[idx_rec]->origin != 0.0 && (var_prc[idx]->is_crd_var || nco_is_spc_in_cf_att(grp_id,"bounds",var_prc[idx]->id) || nco_is_spc_in_cf_att(grp_id,"climatology",var_prc[idx]->id))){
- var_sct *var_crd;
- scv_sct scv;
- /* De-reference */
- var_crd=var_prc[idx];
- scv.val.d=lmt_rec[idx_rec]->origin;
- scv.type=NC_DOUBLE;
- /* Convert scalar to variable type */
- nco_scv_cnf_typ(var_crd->type,&scv);
- (void)nco_var_scv_add(var_crd->type,var_crd->sz,var_crd->has_mss_val,var_crd->mss_val,var_crd->val,&scv);
- } /* end re-basing */
-
+ if(var_prc[idx]->is_crd_var || nco_is_spc_in_cf_att(grp_id,"bounds",var_prc[idx]->id) || nco_is_spc_in_cf_att(grp_id,"climatology",var_prc[idx]->id))
+ {
+ char *fl_udu_sng=nco_lmt_get_udu_att(grp_id,var_prc[idx]->id,"units"); /* Units attribute of coordinate variable */
+ if(fl_udu_sng && lmt_rec[idx_rec]->rbs_sng)
+ {
+ if( nco_cln_clc_dbl_var_dff(fl_udu_sng,lmt_rec[idx_rec]->rbs_sng, lmt_rec[idx_rec]->lmt_cln, (double*)NULL, var_prc[idx]) !=NCO_NOERR)
+ nco_exit(EXIT_FAILURE);
+
+ nco_free(fl_udu_sng);
+ } /* end re-basing */
+ }
+
if(nco_prg_id == ncra){
nco_bool flg_rth_ntl;
if(!rec_usd_cml[idx_rec] || (FLG_MRO && REC_FRS_GRP)) flg_rth_ntl=True; else flg_rth_ntl=False;
diff --git a/src/nco/ncwa.c b/src/nco/ncwa.c
index c6a07e6..ee9789d 100644
--- a/src/nco/ncwa.c
+++ b/src/nco/ncwa.c
@@ -220,6 +220,7 @@ main(int argc,char **argv)
nco_bool flg_rdd=False; /* [flg] Retain degenerate dimensions */
size_t bfr_sz_hnt=NC_SIZEHINT_DEFAULT; /* [B] Buffer size hint */
+ size_t cnk_csh_byt=NCO_CNK_CSH_BYT_DFL; /* [B] Chunk cache size */
size_t cnk_min_byt=NCO_CNK_SZ_MIN_BYT_DFL; /* [B] Minimize size of variable to chunk */
size_t cnk_sz_byt=0UL; /* [B] Chunk size in bytes */
size_t cnk_sz_scl=0UL; /* [nbr] Chunk size scalar */
@@ -759,7 +760,7 @@ main(int argc,char **argv)
fl_out_tmp=nco_fl_out_open(fl_out,&FORCE_APPEND,FORCE_OVERWRITE,fl_out_fmt,&bfr_sz_hnt,RAM_CREATE,RAM_OPEN,WRT_TMP_FL,&out_id);
/* Initialize chunking from user-specified inputs */
- if(fl_out_fmt == NC_FORMAT_NETCDF4 || fl_out_fmt == NC_FORMAT_NETCDF4_CLASSIC) rcd+=nco_cnk_ini(in_id,fl_out,cnk_arg,cnk_nbr,cnk_map,cnk_plc,cnk_min_byt,cnk_sz_byt,cnk_sz_scl,&cnk);
+ if(fl_out_fmt == NC_FORMAT_NETCDF4 || fl_out_fmt == NC_FORMAT_NETCDF4_CLASSIC) rcd+=nco_cnk_ini(in_id,fl_out,cnk_arg,cnk_nbr,cnk_map,cnk_plc,cnk_csh_byt,cnk_min_byt,cnk_sz_byt,cnk_sz_scl,&cnk);
/* Define dimensions, extracted groups, variables, and attributes in output file. */
(void)nco_xtr_dfn(in_id,out_id,&cnk,dfl_lvl,gpe,md5,!FORCE_APPEND,True,False,nco_pck_plc_nil,(char *)NULL,trv_tbl);
@@ -968,7 +969,7 @@ main(int argc,char **argv)
nco_var_avg() will perform nco_op_typ on all variables except coordinate variables
nco_var_avg() always averages coordinate variables */
var_prc_out[idx]=nco_var_avg(var_prc_out[idx],dmn_avg,dmn_avg_nbr,nco_op_typ,flg_rdd,&ddra_info);
- /* var_prc_out[idx]->val now holds numerator of averaging expression documented in NCO User's Guide
+ /* var_prc_out[idx]->val now holds numerator of averaging expression documented in NCO Users Guide
Denominator is also tricky due to sundry normalization options
These logical switches are tricky---modify them with care */
if(NRM_BY_DNM && DO_CONFORM_WGT && (!var_prc[idx]->is_crd_var || WGT_MSK_CRD_VAR)){
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/pkg-grass/nco.git