[med-svn] [cnrun] 16/25: WIP
andrei zavada
hmmr-guest at moszumanska.debian.org
Thu Nov 6 22:08:31 UTC 2014
This is an automated email from the git hooks/post-receive script.
hmmr-guest pushed a commit to branch WIP
in repository cnrun.
commit 31be7fcb232dcdd980b473e5552bf03e91b519cb
Author: andrei zavada <johnhommer at gmail.com>
Date: Sat Sep 27 00:05:04 2014 +0400
WIP
---
debian/control | 4 +-
upstream/configure.ac | 8 +-
upstream/src/Makefile.am | 2 +-
upstream/src/cnrun/Makefile.am | 19 +-
upstream/src/cnrun/cnrun.hh | 160 +++++
upstream/src/cnrun/commands.cc | 1012 +++++++++++++++++++++++++++++
upstream/src/cnrun/completions.cc | 506 ++++++++-------
upstream/src/cnrun/interpreter.cc | 980 ++++++----------------------
upstream/src/cnrun/main.cc | 12 +-
upstream/src/cnrun/runner.hh | 139 ----
upstream/src/libcn/Makefile.am | 1 +
upstream/src/libcn/base-neuron.hh | 48 +-
upstream/src/libcn/base-synapse.hh | 24 +-
upstream/src/libcn/base-unit.cc | 80 +--
upstream/src/libcn/base-unit.hh | 36 +-
upstream/src/libcn/forward-decls.hh | 17 +-
upstream/src/libcn/hosted-attr.hh | 44 +-
upstream/src/libcn/hosted-neurons.cc | 22 +-
upstream/src/libcn/hosted-neurons.hh | 25 +-
upstream/src/libcn/hosted-synapses.cc | 24 +-
upstream/src/libcn/hosted-synapses.hh | 24 +-
upstream/src/libcn/integrate-base.hh | 27 +-
upstream/src/libcn/integrate-rk65.hh | 36 +-
upstream/src/libcn/model-cycle.cc | 37 +-
upstream/src/libcn/model-nmlio.cc | 34 +-
upstream/src/libcn/model-struct.cc | 449 ++-----------
upstream/src/libcn/model-tags.cc | 422 ++++++++++++
upstream/src/libcn/model.hh | 129 ++--
upstream/src/libcn/mx-attr.hh | 19 +-
upstream/src/libcn/param-unit-literals.hh | 14 +-
upstream/src/libcn/sources.cc | 202 ++++--
upstream/src/libcn/sources.hh | 201 +++---
upstream/src/libcn/standalone-attr.hh | 54 +-
upstream/src/libcn/standalone-neurons.cc | 24 +-
upstream/src/libcn/standalone-neurons.hh | 52 +-
upstream/src/libcn/standalone-synapses.cc | 25 +-
upstream/src/libcn/standalone-synapses.hh | 40 +-
upstream/src/libcn/types.cc | 22 +-
upstream/src/libcn/types.hh | 16 +-
upstream/src/libstilton/containers.hh | 8 +-
upstream/src/libstilton/exprparser.cc | 18 +-
upstream/src/libstilton/exprparser.hh | 19 +-
upstream/src/libstilton/libcommon.cc | 16 +-
upstream/src/libstilton/misc.hh | 8 +-
upstream/src/libstilton/string.hh | 12 +-
45 files changed, 2878 insertions(+), 2193 deletions(-)
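
The diffstat above shows the old readline-driven runner (runner.hh and most of interpreter.cc) being replaced by a cnrun.hh/commands.cc pair, with Lua added to the build dependencies: the new interpreter embeds a Lua state and exposes the cmd_*() methods to it. For orientation, here is a minimal sketch of the standard Lua C API lifecycle that the CInterpreterShell constructor and destructor (in the interpreter.cc hunk near the end) follow; the script file name is a placeholder, not something taken from the commit:

    extern "C" {
    #include <lua.h>
    #include <lualib.h>
    #include <lauxlib.h>
    }

    int main()
    {
            lua_State *L = luaL_newstate();              // what the ctor does
            luaL_openlibs( L);                           // standard Lua libraries
            int status = luaL_dofile( L, "script.lua");  // load and run one chunk
            lua_close( L);                               // what the dtor does
            return status;
    }
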
diff --git a/debian/control b/debian/control
index f9e7d19..4a4da22 100644
--- a/debian/control
+++ b/debian/control
@@ -2,7 +2,9 @@ Source: cnrun
Section: science
Priority: optional
Maintainer: Andrei Zavada <johnhommer at gmail.com>
-Build-Depends: debhelper (>= 9), dh-autoreconf, autoconf-archive, g++, libgomp1, libreadline6-dev, pkg-config, libgsl0-dev, libxml2-dev
+Build-Depends: debhelper (>= 9), dh-autoreconf, autoconf-archive, g++,
+ libgomp1, libreadline6-dev, pkg-config, libgsl0-dev, libxml2-dev,
+ liblua5.2-dev
Standards-Version: 3.9.5
Homepage: http://johnhommer.com/academic/code/cnrun
Vcs-Git: git://git.debian.org/git/debian-med/cnrun.git
diff --git a/upstream/configure.ac b/upstream/configure.ac
index b5b3dfc..9851d4e 100644
--- a/upstream/configure.ac
+++ b/upstream/configure.ac
@@ -1,6 +1,6 @@
AC_COPYRIGHT([Copyright (c) 2008-14 Andrei Zavada <johnhommer at gmail.com>])
-AC_INIT([cnrun], [1.1.15_rc], [johnhommer at gmail.com])
+AC_INIT([cnrun], [1.2_rc], [johnhommer at gmail.com])
AC_CONFIG_SRCDIR([src/cnrun/main.cc])
AC_CONFIG_MACRO_DIR([m4])
AC_PREREQ(2.61)
@@ -60,10 +60,14 @@ fi
PKG_CHECK_MODULES([LIBCN], [gsl libxml-2.0])
+AX_PROG_LUA([5.1])
+AX_LUA_LIBS
+AX_LUA_HEADERS
+
AC_ARG_ENABLE(
[tools],
- AS_HELP_STRING( [--enable-tools], [build spike2sdf, varfold & hh-latency-estimator (default = yes)]),
+ AS_HELP_STRING( [--enable-tools], [build spike2sdf, varfold & hh-latency-estimator (default = no)]),
[do_tools=$enableval], [do_tools=no])
AM_CONDITIONAL(DO_TOOLS, [test x"$do_tools" = xyes])
if test x"$do_tools" != xyes; then
diff --git a/upstream/src/Makefile.am b/upstream/src/Makefile.am
index d09bdcd..92ebb6c 100644
--- a/upstream/src/Makefile.am
+++ b/upstream/src/Makefile.am
@@ -1,6 +1,6 @@
include $(top_srcdir)/src/Common.mk
-SUBDIRS = . libstilton libcn cnrun
+SUBDIRS = libstilton libcn cnrun
if DO_TOOLS
SUBDIRS += tools
endif
diff --git a/upstream/src/cnrun/Makefile.am b/upstream/src/cnrun/Makefile.am
index 36a717d..8737de8 100644
--- a/upstream/src/cnrun/Makefile.am
+++ b/upstream/src/cnrun/Makefile.am
@@ -1,20 +1,23 @@
include $(top_srcdir)/src/Common.mk
+AM_CXXFLAGS += $(LUA_INCLUDE)
if DO_PCH
-BUILT_SOURCES = \
+BUILT_SOURCES := \
cnrun.hh.gch
-CLEANFILES = $(BUILT_SOURCES)
+CLEANFILES := $(BUILT_SOURCES)
endif
-bin_PROGRAMS = \
+bin_PROGRAMS := \
cnrun
-cnrun_SOURCES = \
- interpreter.cc completions.cc cnrun.hh main.cc
-cnrun_LDADD = \
+cnrun_SOURCES := \
+ interpreter.cc commands.cc cnrun.hh main.cc
+cnrun_LDADD := \
$(top_srcdir)/src/libicing.a \
$(top_srcdir)/src/libcn/libcn.la \
$(top_srcdir)/src/libstilton/libstilton.la \
- $(LIBCN_LIBS)
-cnrun_LDFLAAGS = \
+ $(LIBCN_LIBS) \
+ $(LUA_LIB)
+
+cnrun_LDFLAGS := \
-shared
diff --git a/upstream/src/cnrun/cnrun.hh b/upstream/src/cnrun/cnrun.hh
new file mode 100644
index 0000000..3074637
--- /dev/null
+++ b/upstream/src/cnrun/cnrun.hh
@@ -0,0 +1,160 @@
+/*
+ * File name: cnrun/cnrun.hh
+ * Project: cnrun
+ * Author: Andrei Zavada <johnhommer at gmail.com>
+ * Initial version: 2008-11-04
+ *
+ * Purpose: interpreter
+ *
+ * License: GPL
+ */
+
+#ifndef CNRUN_CNRUN_CNRUN_H_
+#define CNRUN_CNRUN_CNRUN_H_
+
+#if HAVE_CONFIG_H && !defined(VERSION)
+# include "config.h"
+#endif
+
+#include <list>
+#include <string>
+extern "C" {
+#include <lua.h>
+}
+
+#include "libstilton/misc.hh"
+#include "libcn/model.hh"
+
+namespace cnrun {
+
+struct SInterpOptions
+ : public cnrun::SModelOptions {
+ bool dump_params:1,
+ list_units:1;
+ string working_dir;
+
+ list<string>
+ scripts;
+
+ SInterpOptions ()
+ : dump_params (false),
+ list_units (false),
+ working_dir (".")
+ {}
+};
+
+
+class CInterpreterShell
+ : public cnrun::stilton::C_verprintf {
+
+ public:
+ CInterpreterShell (const SInterpOptions& options_);
+ ~CInterpreterShell ();
+
+ SInterpOptions
+ options;
+
+ enum class TScriptExecResult {
+ ok, file_error, compile_error, stack_error, call_error
+ };
+ TScriptExecResult exec_script( const string& script_fname);
+ // individual commands
+ struct SArg {
+ char type;
+ double vg; int vd; string vs;
+ explicit SArg (const double& v) : type ('g'), vg (v) {}
+ explicit SArg (const int& v) : type ('d'), vd (v) {}
+ explicit SArg (const string& v) : type ('s'), vs (v) {}
+ };
+ enum TCmdResult {
+ ok = 0,
+ no_function, bad_arity,
+ bad_id, bad_value, bad_param, logic_error,
+ no_file, system_error, exit,
+ };
+ struct SCmdResult {
+ TCmdResult result;
+ string error_message;
+ vector<SArg> values;
+ SCmdResult ()
+ : result (TCmdResult::ok)
+ {}
+ SCmdResult (SCmdResult&& rv)
+ : result (rv.result), error_message (move(rv.error_message)),
+ values (move(rv.values))
+ {}
+ };
+ using TArgs = const vector<SArg>;
+ SCmdResult cmd_new_model( TArgs&);
+ SCmdResult cmd_delete_model( TArgs&);
+ SCmdResult cmd_import_nml( TArgs&);
+ SCmdResult cmd_export_nml( TArgs&);
+ SCmdResult cmd_reset_model( TArgs&);
+ SCmdResult cmd_cull_deaf_synapses( TArgs&);
+ SCmdResult cmd_describe_model( TArgs&);
+ SCmdResult cmd_get_model_parameter( TArgs&);
+ SCmdResult cmd_set_model_parameter( TArgs&);
+ SCmdResult cmd_advance( TArgs&);
+ SCmdResult cmd_advance_until( TArgs&);
+
+ SCmdResult cmd_new_neuron( TArgs&);
+ SCmdResult cmd_new_synapse( TArgs&);
+ SCmdResult cmd_get_unit_properties( TArgs&);
+ SCmdResult cmd_get_unit_parameter( TArgs&);
+ SCmdResult cmd_set_unit_parameter( TArgs&);
+ SCmdResult cmd_get_unit_vars( TArgs&);
+ SCmdResult cmd_reset_unit( TArgs&);
+
+ SCmdResult cmd_get_units_matching( TArgs&);
+ SCmdResult cmd_get_units_of_type( TArgs&);
+ SCmdResult cmd_set_matching_neuron_parameter( TArgs&);
+ SCmdResult cmd_set_matching_synapse_parameter( TArgs&);
+ SCmdResult cmd_revert_matching_unit_parameters( TArgs&);
+ SCmdResult cmd_decimate( TArgs&);
+ SCmdResult cmd_putout( TArgs&);
+
+ SCmdResult cmd_new_tape_source( TArgs&);
+ SCmdResult cmd_new_periodic_source( TArgs&);
+ SCmdResult cmd_new_noise_source( TArgs&);
+ SCmdResult cmd_get_sources( TArgs&);
+ SCmdResult cmd_connect_source( TArgs&);
+ SCmdResult cmd_disconnect_source( TArgs&);
+
+ SCmdResult cmd_start_listen( TArgs&);
+ SCmdResult cmd_stop_listen( TArgs&);
+ SCmdResult cmd_start_log_spikes( TArgs&);
+ SCmdResult cmd_stop_log_spikes( TArgs&);
+
+ // vp
+ int verbose_threshold() const
+ { return options.verbosely; }
+ private:
+ map<string, CModel*>
+ models;
+
+ const string
+ current_script;
+
+ lua_State
+ *lua_state;
+
+ // enum class TIssueType { warning, syntax_error, system_error };
+ // static const char* _issue_type_s(TIssueType);
+ // void _report_script_issue( TIssueType, const char* fmt, ...) const
+ // __attribute__ ((format (printf, 3, 4)));
+ public:
+ static list<string> list_commands();
+};
+
+//char** cnrun_completion( const char *text, int start, int end);
+
+} // namespace cnrun
+
+#endif
+
+// Local Variables:
+// Mode: c++
+// indent-tabs-mode: nil
+// tab-width: 8
+// c-basic-offset: 8
+// End:
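
The header above defines the argument and result types the new command layer trades in (SArg, SCmdResult, TArgs). Below is a minimal usage sketch; the method names and argument signatures come from this header and commands.cc, but driving CInterpreterShell directly from C++ like this, rather than through the Lua bindings, is purely an illustration, and it presumes SInterpOptions default-constructs as declared above:

    #include <string>
    #include <vector>
    #include "cnrun.hh"

    int run_once()
    {
            using namespace cnrun;
            using SArg = CInterpreterShell::SArg;

            SInterpOptions opts;
            CInterpreterShell shell (opts);

            // new_model("M1"): one string argument, the model name
            std::vector<SArg> mk {SArg (std::string ("M1"))};
            if ( shell.cmd_new_model( mk).result != CInterpreterShell::TCmdResult::ok )
                    return 1;

            // advance("M1", 100.): model name, then the time increment as a double
            std::vector<SArg> adv {SArg (std::string ("M1")), SArg (100.)};
            auto R = shell.cmd_advance( adv);
            return R.result == CInterpreterShell::TCmdResult::ok ? 0 : 1;
    }
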
diff --git a/upstream/src/cnrun/commands.cc b/upstream/src/cnrun/commands.cc
new file mode 100644
index 0000000..20e1a62
--- /dev/null
+++ b/upstream/src/cnrun/commands.cc
@@ -0,0 +1,1012 @@
+/*
+ * File name: cnrun/commands.cc
+ * Project: cnrun
+ * Author: Andrei Zavada <johnhommer at gmail.com>
+ * Initial version: 2014-09-21
+ *
+ * Purpose: interpreter commands (as CInterpreterShell methods)
+ *
+ * License: GPL
+ */
+
+#if HAVE_CONFIG_H && !defined(VERSION)
+# include "config.h"
+#endif
+
+#include <sys/stat.h>
+#include <stdio.h>
+#include <unistd.h>
+#include <regex.h>
+#include <list>
+
+#include "libstilton/string.hh"
+#include "libcn/integrate-rk65.hh"
+#include "libcn/base-unit.hh"
+#include "libcn/hosted-neurons.hh" // for TIncludeOption
+#include "cnrun.hh"
+
+using namespace std;
+using namespace cnrun;
+
+namespace {
+inline const char* es(int x) { return (x == 1) ? "" : "s"; }
+}
+
+#define CMD_PROLOG(N, F) \
+ CInterpreterShell::SCmdResult R; \
+ if ( aa.size() != N ) { \
+ vp( 0, F"() takes %d arg%s, called with %zu", N, es(N), aa.size()); \
+ return R.result = TCmdResult::bad_arity, move(R); \
+ } \
+ const char *model = aa[0].vs.c_str(); \
+ if ( models.find(model) == models.end() ) { \
+ vp( 0, F"(): no such model: \"%s\"", model); \
+ return R.result = TCmdResult::logic_error, move(R); \
+ } \
+ auto& M = *models.at(model);
+
+
+CInterpreterShell::SCmdResult
+cnrun::CInterpreterShell::
+cmd_new_model( const TArgs& aa)
+{
+ CInterpreterShell::SCmdResult R;
+ if ( aa.size() != 1) {
+ vp( 0, stderr, "new_model() takes 1 parameter, got %zu", aa.size());
+ return R.result = TCmdResult::bad_arity, move(R);
+ }
+ const string& model_name = aa[0].vs;
+
+ auto M = new CModel(
+ model_name,
+ new CIntegrateRK65(
+ options.integration_dt_min,
+ options.integration_dt_max,
+ options.integration_dt_cap),
+ options);
+ if ( !M ) {
+ vp( 0, stderr, "Failed to create model");
+ return R.result = TCmdResult::system_error, move(R);
+ }
+ models[model_name] = M;
+
+ vp( 3,
+ "generator type: %s\n"
+ " seed: %lu\n"
+ " first value: %lu\n",
+ gsl_rng_name( M->rng()),
+ gsl_rng_default_seed,
+ gsl_rng_get( M->rng()));
+
+ return move(R);
+}
+
+
+
+CInterpreterShell::SCmdResult
+cnrun::CInterpreterShell::
+cmd_delete_model( const TArgs& aa)
+{
+ CMD_PROLOG (1, "delete_model");
+
+ delete &M;
+ models.erase(aa[0].vs.c_str());
+
+ return move(R);
+}
+
+
+
+
+CInterpreterShell::SCmdResult
+cnrun::CInterpreterShell::
+cmd_import_nml( const TArgs& aa)
+{
+ CMD_PROLOG (1, "import_nml")
+
+ const string
+ &fname = aa[1].vs;
+ string fname2 = stilton::str::tilda2homedir(fname);
+ if ( M.import_NetworkML( fname2, CModel::TNMLImportOption::merge) < 0 ) {
+ return R.result = TCmdResult::system_error, move(R);
+ }
+
+ M.cull_blind_synapses();
+
+ return move(R);
+}
+
+
+
+CInterpreterShell::SCmdResult
+cnrun::CInterpreterShell::
+cmd_export_nml( const TArgs& aa)
+{
+ CMD_PROLOG (1, "export_nml")
+
+ const string
+ &fname = aa[1].vs;
+ string fname2 = stilton::str::tilda2homedir(fname);
+ if ( M.export_NetworkML( fname2) < 0 ) {
+ return R.result = TCmdResult::system_error, move(R);
+ }
+
+ return move(R);
+}
+
+
+
+CInterpreterShell::SCmdResult
+cnrun::CInterpreterShell::
+cmd_reset_model( const TArgs& aa)
+{
+ CMD_PROLOG (1, "reset_model")
+
+ M.reset( CModel::TResetOption::no_params); // for with_params, there is revert_unit_parameters()
+
+ return move(R);
+}
+
+
+
+CInterpreterShell::SCmdResult
+cnrun::CInterpreterShell::
+cmd_cull_deaf_synapses( const TArgs& aa)
+{
+ CMD_PROLOG (1, "cull_deaf_synapses")
+
+ M.cull_deaf_synapses();
+
+ return move(R);
+}
+
+
+
+CInterpreterShell::SCmdResult
+cnrun::CInterpreterShell::
+cmd_describe_model( const TArgs& aa)
+{
+ CMD_PROLOG (1, "describe_model");
+
+ M.dump_metrics();
+ M.dump_units();
+ M.dump_state();
+
+ return move(R);
+}
+
+
+
+CInterpreterShell::SCmdResult
+cnrun::CInterpreterShell::
+cmd_get_model_parameter( const TArgs& aa)
+{
+ CInterpreterShell::SCmdResult R;
+ if ( aa.size() != 2 ) {
+ vp( 0, "get_model_parameter() takes 2 args, called with %zu", aa.size());
+ return R.result = TCmdResult::bad_arity, move(R);
+ }
+ const string& model_name = aa[0].vs;
+ CModel *M = nullptr;
+ if ( model_name.size() != 0 ) {
+ auto Mi = models.find(model_name);
+ if ( Mi == models.end() ) {
+ vp( 0, "get_model_parameter(): no such model: \"%s\"", model_name.c_str());
+ return R.result = TCmdResult::logic_error, move(R);
+ } else
+ M = Mi->second;
+ }
+
+ const string
+ &parameter = aa[1].vs;
+
+ if ( parameter == "verbosely" ) {
+ R.values.push_back( SArg (M ? M->options.verbosely : options.verbosely));
+
+ } else if ( parameter == "integration_dt_min" ) {
+ R.values.push_back( SArg (M ? M->dt_min() : options.integration_dt_min));
+
+ } else if ( parameter == "integration_dt_max" ) {
+ R.values.push_back( SArg (M ? M->dt_max() : options.integration_dt_max)); // assuming a dt_max() getter symmetric to dt_min()
+
+ } else if ( parameter == "integration_dt_cap" ) {
+ R.values.push_back( SArg (M ? M->dt_cap() : options.integration_dt_cap)); // assuming a dt_cap() getter symmetric to dt_min()
+
+ } else if ( parameter == "listen_dt" ) {
+ R.values.push_back( SArg (M ? M->options.listen_dt : options.listen_dt));
+
+ } else if ( parameter == "listen_mode" ) {
+ auto F = [] (bool v) -> char { return v ? '+' : '-'; };
+ R.values.push_back(
+ SArg (M
+ ? stilton::str::sasprintf(
+ "1%cd%cb%c",
+ F(M->options.listen_1varonly),
+ F(M->options.listen_deferwrite),
+ F(M->options.listen_binary))
+ : stilton::str::sasprintf(
+ "1%cd%cb%c",
+ F(options.listen_1varonly),
+ F(options.listen_deferwrite),
+ F(options.listen_binary))));
+
+ } else if ( parameter == "sxf_start_delay" ) {
+ R.values.push_back( SArg (M ? M->options.sxf_start_delay : options.sxf_start_delay));
+
+ } else if ( parameter == "sxf_period" ) {
+ R.values.push_back( SArg (M ? M->options.sxf_period : options.sxf_period));
+
+ } else if ( parameter == "sdf_sigma" ) {
+ R.values.push_back( SArg (M ? M->options.sdf_sigma : options.sdf_sigma));
+
+ }
+
+ return move(R);
+}
+
+
+
+CInterpreterShell::SCmdResult
+cnrun::CInterpreterShell::
+cmd_set_model_parameter( const TArgs& aa)
+{
+ CInterpreterShell::SCmdResult R;
+ if ( aa.size() != 3 ) {
+ vp( 0, "set_model_parameter() takes 3 args, called with %zu", aa.size());
+ return R.result = TCmdResult::bad_arity, move(R);
+ }
+ const string& model = aa[0].vs;
+ CModel *M = nullptr;
+ if ( model.size() != 0 ) {
+ auto Mi = models.find(model);
+ if ( Mi == models.end() ) {
+ vp( 0, "set_model_parameter(): no such model: \"%s\"", model.c_str());
+ return R.result = TCmdResult::logic_error, move(R);
+ } else
+ M = Mi->second;
+ }
+
+ const string
+ &parameter = aa[1].vs,
+ &value_s = aa[2].vs; // unconverted
+
+ if ( parameter == "verbosely") {
+ int v;
+ if ( 1 != sscanf( value_s.c_str(), "%d", &v) ) {
+ vp( 0, stderr, "set_model_parameter(): bad value for parameter `verbosely'");
+ return R.result = TCmdResult::bad_value, move(R);
+ }
+ options.verbosely = v;
+ if ( M )
+ M->options.verbosely = v;
+
+ } else if ( parameter == "integration_dt_min" ) {
+ double v;
+ if ( 1 != sscanf( value_s.c_str(), "%lg", &v) ) {
+ vp( 0, stderr, "set_model_parameter(): bad value for parameter `integration_dt_min'");
+ return R.result = TCmdResult::bad_value, move(R);
+ }
+ options.integration_dt_min = v;
+ if ( M )
+ M->set_dt_min( v);
+
+ } else if ( parameter == "integration_dt_max" ) {
+ double v;
+ if ( 1 != sscanf( value_s.c_str(), "%lg", &v) ) {
+ vp( 0, stderr, "set_model_parameter(): bad value for parameter `integration_dt_max'");
+ return R.result = TCmdResult::bad_value, move(R);
+ }
+ options.integration_dt_max = v;
+ if ( M )
+ M->set_dt_max( v);
+
+ } else if ( parameter == "integration_dt_cap" ) {
+ double v;
+ if ( 1 != sscanf( value_s.c_str(), "%lg", &v) ) {
+ vp( 0, stderr, "set_model_parameter(): bad value for parameter `integration_dt_cap'");
+ return R.result = TCmdResult::bad_value, move(R);
+ }
+ options.integration_dt_cap = v;
+ if ( M )
+ M->set_dt_cap( v);
+
+ } else if ( parameter == "listen_dt" ) {
+ double v;
+ if ( 1 != sscanf( value_s.c_str(), "%lg", &v) ) {
+ vp( 0, stderr, "set_model_parameter(): bad value for parameter `listen_dt'");
+ return R.result = TCmdResult::bad_value, move(R);
+ }
+ options.listen_dt = v;
+ if ( M )
+ M->options.listen_dt = v;
+
+ } else if ( parameter == "listen_mode" ) {
+ size_t p;
+ if ( (p = value_s.find('1')) != string::npos ) options.listen_1varonly = (value_s[p+1] != '-');
+ if ( (p = value_s.find('d')) != string::npos ) options.listen_deferwrite = (value_s[p+1] != '-');
+ if ( (p = value_s.find('b')) != string::npos ) options.listen_binary = (value_s[p+1] != '-');
+ if ( M ) {
+ M->options.listen_1varonly = options.listen_1varonly;
+ M->options.listen_deferwrite = options.listen_deferwrite;
+ M->options.listen_binary = options.listen_binary;
+ }
+ // better spell out these parameters, ffs
+
+ } else if ( parameter == "sxf_start_delay" ) {
+ double v;
+ if ( 1 != sscanf( value_s.c_str(), "%lg", &v) ) {
+ vp( 0, stderr, "set_model_parameter(): bad value for parameter `sxf_start_delay'");
+ return R.result = TCmdResult::bad_value, move(R);
+ }
+ options.sxf_start_delay = v;
+ if ( M )
+ M->options.sxf_start_delay = v;
+
+ } else if ( parameter == "sxf_period" ) {
+ double v;
+ if ( 1 != sscanf( value_s.c_str(), "%lg", &v) ) {
+ vp( 0, stderr, "set_model_parameter(): bad value for parameter `sxf_period'");
+ return R.result = TCmdResult::bad_value, move(R);
+ }
+ options.sxf_period = v;
+ if ( M )
+ M->options.sxf_period = v;
+
+ } else if ( parameter == "sdf_sigma" ) {
+ double v;
+ if ( 1 != sscanf( value_s.c_str(), "%lg", &v) ) {
+ vp( 0, stderr, "set_model_parameter(): bad value for parameter `sdf_sigma'");
+ return R.result = TCmdResult::bad_value, move(R);
+ }
+ options.sdf_sigma = v;
+ if ( M )
+ M->options.sdf_sigma = v;
+ }
+
+ return move(R);
+}
+
+
+
+
+CInterpreterShell::SCmdResult
+cnrun::CInterpreterShell::
+cmd_advance( const TArgs& aa)
+{
+ CMD_PROLOG (2, "advance")
+
+ const double& time_to_go = aa[1].vg;
+ const double end_time = M.model_time() + time_to_go;
+ if ( M.model_time() > end_time ) {
+ vp( 0, stderr, "advance(%g): Cannot go back in time (model is now at %g sec)", end_time, M.model_time());
+ return R.result = TCmdResult::bad_value, move(R);
+ }
+ if ( !M.advance( end_time) ) {
+ return R.result = TCmdResult::logic_error, move(R);
+ }
+
+ return move(R);
+}
+
+
+
+CInterpreterShell::SCmdResult
+cnrun::CInterpreterShell::
+cmd_advance_until( const TArgs& aa)
+{
+ CMD_PROLOG (2, "advance_until")
+
+ const double end_time = aa[1].vg;
+ if ( M.model_time() > end_time ) {
+ vp( 0, stderr, "advance_until(%g): Cannot go back in time (model is now at %g sec)", end_time, M.model_time());
+ return R.result = TCmdResult::bad_value, move(R);
+ }
+ if ( !M.advance( end_time) ) {
+ return R.result = TCmdResult::logic_error, move(R);
+ }
+
+ return move(R);
+}
+
+
+// ----------------------------------------
+
+CInterpreterShell::SCmdResult
+cnrun::CInterpreterShell::
+cmd_new_neuron( const TArgs& aa)
+{
+ // arity has been checked already, in host_fun(), but it may still make sense to re-check it here, which CMD_PROLOG does
+ CMD_PROLOG (3, "new_neuron")
+
+ const string
+ &type = aa[1].vs,
+ &label = aa[2].vs;
+
+ if ( !M.add_neuron_species(
+ type, label,
+ TIncludeOption::is_last) ) {
+ // vp( "`add_neuron' failed"); // we trust sufficient diagnostics has been reported
+ return R.result = TCmdResult::logic_error, move(R);
+ }
+
+ return /* R.result = TCmdResult::ok, */ move(R);
+}
+
+
+
+CInterpreterShell::SCmdResult
+cnrun::CInterpreterShell::
+cmd_new_synapse( const TArgs& aa)
+{
+ CMD_PROLOG (5, "new_synapse")
+
+ const string
+ &type = aa[1].vs,
+ &src = aa[2].vs,
+ &tgt = aa[3].vs;
+ const double
+ &g = aa[4].vg;
+
+ if ( !M.add_synapse_species(
+ type, src, tgt, g,
+ CModel::TSynapseCloningOption::yes,
+ TIncludeOption::is_last) ) {
+ return R.result = TCmdResult::logic_error, move(R);
+ }
+
+ return move(R);
+}
+
+
+
+CInterpreterShell::SCmdResult
+cnrun::CInterpreterShell::
+cmd_get_unit_properties( const TArgs& aa)
+{
+ CMD_PROLOG (2, "get_unit_properties")
+
+ const string
+ &label = aa[1].vs;
+ auto Up = M.unit_by_label(label);
+ if ( Up ) {
+ R.values.push_back( SArg (Up->label()));
+ R.values.push_back( SArg (Up->class_name()));
+ R.values.push_back( SArg (Up->family()));
+ R.values.push_back( SArg (Up->species()));
+ R.values.push_back( SArg ((int)Up->has_sources()));
+ R.values.push_back( SArg ((int)Up->is_not_altered()));
+ } else {
+ vp( 0, stderr, "get_unit_properties(\"%s\"): No such unit\n", label.c_str());
+ return R.result = TCmdResult::bad_id, move(R);
+ }
+
+ return move(R);
+}
+
+
+
+CInterpreterShell::SCmdResult
+cnrun::CInterpreterShell::
+cmd_get_unit_parameter( const TArgs& aa)
+{
+ CMD_PROLOG (3, "get_unit_parameter")
+
+ const string
+ &label = aa[1].vs,
+ &param = aa[2].vs;
+ auto Up = M.unit_by_label(label);
+ if ( Up )
+ try {
+ R.values.push_back(
+ SArg (Up->get_param_value( param)));
+ } catch (exception& ex) {
+ return R.result = TCmdResult::bad_param, move(R);
+ }
+ else {
+ vp( 0, stderr, "get_unit_parameter(\"%s\"): No such unit\n", label.c_str());
+ return R.result = TCmdResult::bad_id, move(R);
+ }
+
+ return move(R);
+}
+
+
+
+CInterpreterShell::SCmdResult
+cnrun::CInterpreterShell::
+cmd_set_unit_parameter( const TArgs& aa)
+{
+ CMD_PROLOG (4, "set_unit_parameter")
+
+ const string
+ &label = aa[1].vs,
+ &param = aa[2].vs;
+ const double
+ &value = aa[3].vg;
+
+ auto Up = M.unit_by_label(label);
+ if ( Up )
+ try {
+ Up->param_value( param) = value;
+ } catch (exception& ex) {
+ return R.result = TCmdResult::bad_param, move(R);
+ }
+ else {
+ vp( 0, stderr, "set_unit_parameter(\"%s\"): No such unit\n", label.c_str());
+ return R.result = TCmdResult::bad_id, move(R);
+ }
+
+ return move(R);
+}
+
+
+
+CInterpreterShell::SCmdResult
+cnrun::CInterpreterShell::
+cmd_get_unit_vars( const TArgs& aa)
+{
+ CMD_PROLOG (3, "get_unit_vars")
+
+ const string
+ &label = aa[1].vs,
+ &param = aa[2].vs;
+ auto Up = M.unit_by_label(label);
+ if ( Up )
+ try {
+ R.values.push_back(
+ SArg (Up->get_param_value( param)));
+ } catch (exception& ex) {
+ return R.result = TCmdResult::bad_param, move(R);
+ }
+ else {
+ vp( 0, stderr, "get_unit_parameter(\"%s\"): No such unit\n", label.c_str());
+ return R.result = TCmdResult::bad_id, move(R);
+ }
+
+ return move(R);
+}
+
+
+
+CInterpreterShell::SCmdResult
+cnrun::CInterpreterShell::
+cmd_reset_unit( const TArgs& aa)
+{
+ CMD_PROLOG (3, "reset_unit")
+
+ const string
+ &label = aa[1].vs;
+ auto Up = M.unit_by_label(label);
+ if ( Up )
+ Up -> reset_state();
+ else {
+ vp( 0, stderr, "reset_unit(\"%s\"): No such unit\n", label.c_str());
+ return R.result = TCmdResult::bad_id, move(R);
+ }
+
+ return move(R);
+}
+
+
+// ----------------------------------------
+
+CInterpreterShell::SCmdResult
+cnrun::CInterpreterShell::
+cmd_get_units_matching( const TArgs& aa)
+{
+ CMD_PROLOG (2, "get_units_matching")
+
+ const string &label = aa[1].vs;
+ auto L = M.list_units( label);
+ for ( auto& U : L )
+ R.values.emplace_back( SArg (U->label()));
+
+ return move(R);
+}
+
+
+
+CInterpreterShell::SCmdResult
+cnrun::CInterpreterShell::
+cmd_get_units_of_type( const TArgs& aa)
+{
+ CMD_PROLOG (2, "get_units_of_type")
+
+ const string &type = aa[1].vs;
+ auto L = M.list_units();
+ for ( auto& U : L )
+ if ( type == U->species() )
+ R.values.emplace_back( SArg (U->label()));
+
+ return move(R);
+}
+
+
+
+CInterpreterShell::SCmdResult
+cnrun::CInterpreterShell::
+cmd_set_matching_neuron_parameter( const TArgs& aa)
+{
+ CMD_PROLOG (4, "set_matching_neuron_parameter")
+
+ const string
+ &label = aa[1].vs,
+ &param = aa[2].vs;
+ const double
+ &value = aa[3].vg;
+
+ list<CModel::STagGroupNeuronParmSet> tags {CModel::STagGroupNeuronParmSet (label, param, value)};
+ R.values.push_back(
+ SArg ((int)M.process_paramset_static_tags(
+ tags)));
+
+ return move(R);
+}
+
+
+
+CInterpreterShell::SCmdResult
+cnrun::CInterpreterShell::
+cmd_set_matching_synapse_parameter( const TArgs& aa)
+{
+ CMD_PROLOG (5, "set_matching_synapse_parameter")
+
+ const string
+ &src = aa[1].vs,
+ &tgt = aa[2].vs,
+ &param = aa[3].vs;
+ const double
+ &value = aa[4].vg;
+
+ list<CModel::STagGroupSynapseParmSet> tags {CModel::STagGroupSynapseParmSet (src, tgt, param, value)};
+ R.values.push_back(
+ SArg ((int)M.process_paramset_static_tags(
+ tags)));
+
+ return move(R);
+}
+
+
+
+CInterpreterShell::SCmdResult
+cnrun::CInterpreterShell::
+cmd_revert_matching_unit_parameters( const TArgs& aa)
+{
+ CMD_PROLOG (4, "revert_matching_unit_parameters")
+
+ const string
+ &label = aa[1].vs;
+
+ auto L = M.list_units( label);
+ size_t count = 0;
+ for ( auto& U : L ) {
+ U->reset_params();
+ ++count;
+ }
+
+ R.values.push_back( SArg ((int)count));
+
+ return move(R);
+}
+
+
+
+CInterpreterShell::SCmdResult
+cnrun::CInterpreterShell::
+cmd_decimate( const TArgs& aa)
+{
+ CMD_PROLOG (3, "decimate")
+
+ const string &label = aa[1].vs;
+ const double& frac = aa[2].vg;
+ if ( frac < 0. || frac > 1. ) {
+ vp( 0, stderr, "decimate(%g): Decimation fraction outside [0..1]", frac);
+ return R.result = TCmdResult::bad_value, move(R);
+ }
+
+ list<CModel::STagGroupDecimate> tags {{label, frac}};
+ R.values.push_back(
+ SArg ((int)M.process_decimate_tags(
+ tags)));
+
+ return move(R);
+}
+
+
+
+CInterpreterShell::SCmdResult
+cnrun::CInterpreterShell::
+cmd_putout( const TArgs& aa)
+{
+ CMD_PROLOG (2, "putout")
+
+ const string &label = aa[1].vs;
+
+ list<CModel::STagGroup> tags {{label, CModel::STagGroup::TInvertOption::no}};
+ R.values.push_back(
+ SArg ((int)M.process_putout_tags(
+ tags)));
+
+ return move(R);
+}
+
+
+// ----------------------------------------
+
+CInterpreterShell::SCmdResult
+cnrun::CInterpreterShell::
+cmd_new_tape_source( const TArgs& aa)
+{
+ CMD_PROLOG (4, "new_tape_source")
+
+ const string
+ &name = aa[1].vs,
+ &fname = aa[2].vs;
+ const int
+ &looping = aa[3].vd;
+
+ if ( M.source_by_id( name) ) {
+ vp( 0, stderr, "new_tape_source(): A source named \"%s\" already exists", name.c_str());
+ return R.result = TCmdResult::logic_error, move(R);
+ }
+
+ try {
+ auto source = new CSourceTape(
+ name, fname,
+ looping ? TSourceLoopingOption::yes : TSourceLoopingOption::no);
+ if ( source )
+ M.add_source( source);
+ else {
+ vp( 0, stderr, "new_tape_source(\"%s\", \"%s\"): Failed impossibly",
+ name.c_str(), fname.c_str());
+ return R.result = TCmdResult::system_error, move(R);
+ }
+ } catch (exception& ex) {
+ vp( 0, stderr, "new_tape_source(\"%s\", \"%s\"): %s",
+ name.c_str(), fname.c_str(), ex.what());
+ return R.result = TCmdResult::system_error, move(R);
+ }
+ return move(R);
+}
+
+
+
+CInterpreterShell::SCmdResult
+cnrun::CInterpreterShell::
+cmd_new_periodic_source( const TArgs& aa)
+{
+ CMD_PROLOG (5, "new_periodic_source")
+
+ const string
+ &name = aa[1].vs,
+ &fname = aa[2].vs;
+ const int
+ &looping = aa[3].vd;
+ const double
+ &period = aa[4].vg;
+
+ if ( M.source_by_id( name) ) {
+ vp( 0, stderr, "new_periodic_source(): A source named \"%s\" already exists", name.c_str());
+ return R.result = TCmdResult::logic_error, move(R);
+ }
+
+ try {
+ auto source = new CSourcePeriodic(
+ name, fname,
+ looping ? TSourceLoopingOption::yes : TSourceLoopingOption::no,
+ period);
+ if ( source )
+ M.add_source( source);
+ else {
+ vp( 0, stderr, "new_periodic_source(\"%s\", \"%s\"): Failed impossibly",
+ name.c_str(), fname.c_str());
+ return R.result = TCmdResult::system_error, move(R);
+ }
+ } catch (exception& ex) {
+ vp( 0, stderr, "new_periodic_source(\"%s\", \"%s\"): %s",
+ name.c_str(), fname.c_str(), ex.what());
+ return R.result = TCmdResult::system_error, move(R);
+ }
+ return move(R);
+}
+
+
+
+CInterpreterShell::SCmdResult
+cnrun::CInterpreterShell::
+cmd_new_noise_source( const TArgs& aa)
+{
+ CMD_PROLOG (6, "new_noise_source")
+
+ const string
+ &name = aa[1].vs;
+ const double
+ &min = aa[2].vg,
+ &max = aa[3].vg,
+ &sigma = aa[4].vg;
+ const string
+ &distribution = aa[5].vs;
+
+ if ( M.source_by_id( name) ) {
+ vp( 0, stderr, "new_noise_source(): A source named \"%s\" already exists", name.c_str());
+ return R.result = TCmdResult::logic_error, move(R);
+ }
+
+ try {
+ auto source = new CSourceNoise(
+ name, min, max, sigma, CSourceNoise::distribution_by_name(distribution));
+ if ( source )
+ M.add_source( source);
+ else {
+ vp( 0, stderr, "new_noise_source(\"%s\"): Failed impossibly",
+ name.c_str());
+ return R.result = TCmdResult::system_error, move(R);
+ }
+ } catch (exception& ex) {
+ vp( 0, stderr, "new_noise_source(\"%s\"): %s",
+ name.c_str(), ex.what());
+ return R.result = TCmdResult::system_error, move(R);
+ }
+ return move(R);
+}
+
+
+
+CInterpreterShell::SCmdResult
+cnrun::CInterpreterShell::
+cmd_get_sources( const TArgs& aa)
+{
+ CMD_PROLOG (1, "get_sources")
+
+ for ( auto& S : M.sources() )
+ R.values.push_back( SArg (S->name()));
+
+ return move(R);
+}
+
+
+
+CInterpreterShell::SCmdResult
+cnrun::CInterpreterShell::
+cmd_connect_source( const TArgs& aa)
+{
+ CMD_PROLOG (4, "connect_source")
+
+ const string
+ &label = aa[1].vs,
+ &parm = aa[2].vs,
+ &source = aa[3].vs;
+ C_BaseSource *S = M.source_by_id( source);
+ if ( !S ) {
+ vp( 0, stderr, "connect_source(): Unknown source: \"%s\"", source.c_str());
+ return R.result = TCmdResult::bad_id, move(R);
+ }
+ // cannot check whether units matching label indeed have a parameter so named
+ list<CModel::STagGroupSource> tags {{label, parm, S, CModel::STagGroup::TInvertOption::no}};
+ R.values.push_back(
+ SArg ((int)M.process_paramset_source_tags(
+ tags)));
+
+ return move(R);
+}
+
+
+
+CInterpreterShell::SCmdResult
+cnrun::CInterpreterShell::
+cmd_disconnect_source( const TArgs& aa)
+{
+ CMD_PROLOG (4, "disconnect_source")
+
+ const string
+ &label = aa[1].vs,
+ &parm = aa[2].vs,
+ &source = aa[3].vs;
+ C_BaseSource *S = M.source_by_id( source);
+ if ( !S ) {
+ vp( 0, stderr, "disconnect_source(): Unknown source: \"%s\"", source.c_str());
+ return R.result = TCmdResult::bad_id, move(R);
+ }
+ list<CModel::STagGroupSource> tags {{label, parm, S, CModel::STagGroup::TInvertOption::yes}};
+ R.values.push_back(
+ SArg ((int)M.process_paramset_source_tags(
+ tags)));
+
+ return move(R);
+}
+
+
+// ----------------------------------------
+
+CInterpreterShell::SCmdResult
+cnrun::CInterpreterShell::
+cmd_start_listen( const TArgs& aa)
+{
+ CMD_PROLOG (2, "start_listen")
+
+ const string
+ &label = aa[1].vs;
+ list<CModel::STagGroupListener> tags {CModel::STagGroupListener (
+ label, (0
+ | (M.options.listen_1varonly ? CN_ULISTENING_1VARONLY : 0)
+ | (M.options.listen_deferwrite ? CN_ULISTENING_DEFERWRITE : 0)
+ | (M.options.listen_binary ? CN_ULISTENING_BINARY : CN_ULISTENING_DISK)),
+ CModel::STagGroup::TInvertOption::no)};
+ R.values.push_back(
+ SArg ((int)M.process_listener_tags(
+ tags)));
+ return move(R);
+}
+
+
+
+CInterpreterShell::SCmdResult
+cnrun::CInterpreterShell::
+cmd_stop_listen( const TArgs& aa)
+{
+ CMD_PROLOG (2, "stop_listen")
+
+ const string
+ &label = aa[1].vs;
+ list<CModel::STagGroupListener> tags {{
+ label, (0
+ | (M.options.listen_1varonly ? CN_ULISTENING_1VARONLY : 0)
+ | (M.options.listen_deferwrite ? CN_ULISTENING_DEFERWRITE : 0)
+ | (M.options.listen_binary ? CN_ULISTENING_BINARY : CN_ULISTENING_DISK)),
+ CModel::STagGroup::TInvertOption::yes}};
+ R.values.push_back(
+ SArg ((int)M.process_listener_tags(
+ tags)));
+ return move(R);
+}
+
+
+
+CInterpreterShell::SCmdResult
+cnrun::CInterpreterShell::
+cmd_start_log_spikes( const TArgs& aa)
+{
+ CMD_PROLOG (2, "start_log_spikes")
+
+ if ( M.options.sxf_period <= 0. || M.options.sdf_sigma <= 0. )
+ vp( 1, "SDF parameters not set up, will only log spike times");
+
+ const string
+ &label = aa[1].vs;
+ list<CModel::STagGroupSpikelogger> tags {{
+ label,
+ M.options.sxf_period, M.options.sdf_sigma, M.options.sxf_start_delay,
+ CModel::STagGroup::TInvertOption::no}};
+ R.values.push_back(
+ SArg ((int)M.process_spikelogger_tags(
+ tags)));
+ return move(R);
+}
+
+
+
+CInterpreterShell::SCmdResult
+cnrun::CInterpreterShell::
+cmd_stop_log_spikes( const TArgs& aa)
+{
+ CMD_PROLOG (2, "start_log_spikes")
+
+ const string
+ &label = aa[1].vs;
+ list<CModel::STagGroupSpikelogger> tags {{
+ label,
+ M.options.sxf_period, M.options.sdf_sigma, M.options.sxf_start_delay,
+ CModel::STagGroup::TInvertOption::yes}};
+ R.values.push_back(
+ SArg ((int)M.process_spikelogger_tags(
+ tags)));
+ return move(R);
+}
+
+
+// Local Variables:
+// Mode: c++
+// indent-tabs-mode: nil
+// tab-width: 8
+// c-basic-offset: 8
+// End:
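
Every command body above opens with CMD_PROLOG(N, F), defined near the top of this file. For reference, here is cmd_advance() with the macro expanded by hand (whitespace mine, behaviour unchanged); the string literals concatenate, so the messages come out as "advance() takes ..." and "advance(): no such model ...":

    CInterpreterShell::SCmdResult
    cnrun::CInterpreterShell::
    cmd_advance( const TArgs& aa)
    {
            // CMD_PROLOG (2, "advance") expands to:
            CInterpreterShell::SCmdResult R;
            if ( aa.size() != 2 ) {
                    vp( 0, "advance() takes %d arg%s, called with %zu", 2, es(2), aa.size());
                    return R.result = TCmdResult::bad_arity, move(R);
            }
            const char *model = aa[0].vs.c_str();
            if ( models.find(model) == models.end() ) {
                    vp( 0, "advance(): no such model: \"%s\"", model);
                    return R.result = TCmdResult::logic_error, move(R);
            }
            auto& M = *models.at(model);

            // ...and the rest is the body as committed above
            const double& time_to_go = aa[1].vg;
            const double end_time = M.model_time() + time_to_go;
            if ( M.model_time() > end_time ) {
                    vp( 0, stderr, "advance(%g): Cannot go back in time (model is now at %g sec)", end_time, M.model_time());
                    return R.result = TCmdResult::bad_value, move(R);
            }
            if ( !M.advance( end_time) ) {
                    return R.result = TCmdResult::logic_error, move(R);
            }

            return move(R);
    }
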
diff --git a/upstream/src/cnrun/completions.cc b/upstream/src/cnrun/completions.cc
index 3609ee1..6ba8cdd 100644
--- a/upstream/src/cnrun/completions.cc
+++ b/upstream/src/cnrun/completions.cc
@@ -1,20 +1,20 @@
/*
- * Author: Andrei Zavada <johnhommer at gmail.com>
+ * File name: cnrun/completions.cc
+ * Project: cnrun
+ * Author: Andrei Zavada <johnhommer at gmail.com>
+ * Initial version: 2010-02-12
*
- * License: GPL-2+
+ * Purpose: interpreter readline completions
*
- * Initial version: 2010-02-12
- *
- * CNModel runner (interpreter)
+ * License: GPL
*/
-
-#include <stdio.h>
-
#if HAVE_CONFIG_H && !defined(VERSION)
# include "config.h"
#endif
+#include <cstdio>
+
#ifdef HAVE_LIBREADLINE
# if defined(HAVE_READLINE_READLINE_H)
# include <readline/readline.h>
@@ -31,8 +31,8 @@
# endif
#endif
-#include "runner.hh"
#include "libcn/model.hh"
+#include "cnrun.hh"
using namespace std;
using namespace cnrun;
@@ -42,25 +42,25 @@ using namespace cnrun;
static char*
cnrun_null_generator( const char* text, int state)
{
- return nullptr;
+ return nullptr;
}
static char*
cnrun_cmd_generator( const char* text, int state)
{
- static int list_index, len;
+ static int list_index, len;
const char *name;
if ( !state ) {
- list_index = 0;
- len = strlen( text);
+ list_index = 0;
+ len = strlen( text);
}
while ( (name = cnrun_cmd[list_index]) ) {
- list_index++;
- if ( strncmp( name, text, len) == 0 )
- return strdup( name);
+ list_index++;
+ if ( strncmp( name, text, len) == 0 )
+ return strdup( name);
}
return nullptr;
}
@@ -68,18 +68,18 @@ cnrun_cmd_generator( const char* text, int state)
static char*
cnrun_source_types_generator( const char* text, int state)
{
- static int list_index, len;
+ static int list_index, len;
const char *name;
if ( !state ) {
- list_index = 0;
- len = strlen( text);
+ list_index = 0;
+ len = strlen( text);
}
while ( (name = __SourceTypes[list_index]) ) {
- list_index++;
- if ( strncmp( name, text, len) == 0 )
- return strdup( name);
+ list_index++;
+ if ( strncmp( name, text, len) == 0 )
+ return strdup( name);
}
return nullptr;
}
@@ -93,26 +93,26 @@ cnrun_source_types_generator( const char* text, int state)
static char*
cnrun_neu_type_generator( const char *text, int state)
{
- static const char** neuron_types = nullptr;
- if ( !neuron_types ) {
- if ( !(neuron_types = (const char**)malloc( (NT_LAST - NT_FIRST+1+1)*sizeof(char*))) )
- abort();
- size_t n;
- for ( n = 0; n <= NT_LAST - NT_FIRST; n++ )
- neuron_types[n] = strdup( __CNUDT[NT_FIRST+n].species); // family would do just as well
- neuron_types[n] = nullptr;
- }
-
- static int list_index, len;
+ static const char** neuron_types = nullptr;
+ if ( !neuron_types ) {
+ if ( !(neuron_types = (const char**)malloc( (NT_LAST - NT_FIRST+1+1)*sizeof(char*))) )
+ abort();
+ size_t n;
+ for ( n = 0; n <= NT_LAST - NT_FIRST; n++ )
+ neuron_types[n] = strdup( __CNUDT[NT_FIRST+n].species); // family would do just as well
+ neuron_types[n] = nullptr;
+ }
+
+ static int list_index, len;
const char *name;
if ( !state ) {
- list_index = 0;
- len = strlen( text);
+ list_index = 0;
+ len = strlen( text);
}
while ( (name = neuron_types[list_index]) ) {
- list_index++;
- if ( strncmp( name, text, len) == 0 )
- return strdup( name);
+ list_index++;
+ if ( strncmp( name, text, len) == 0 )
+ return strdup( name);
}
return nullptr;
}
@@ -122,27 +122,27 @@ cnrun_neu_type_generator( const char *text, int state)
static char*
cnrun_syn_type_generator( const char *text, int state)
{
- static const char** synapse_types = nullptr;
- if ( !synapse_types ) {
- if ( !(synapse_types = (const char**)malloc( (YT_LAST - YT_FIRST+1+1)*sizeof(char*))) )
- abort();
- size_t n, i;
- for ( n = i = 0; n <= YT_LAST - YT_FIRST; n++ )
- synapse_types[i++] = strdup( __CNUDT[YT_FIRST+n].family);
- // there are fewer families than species, so we are wasting some tens of bytes here. oh well.
- synapse_types[i] = nullptr;
- }
-
- static int list_index, len;
+ static const char** synapse_types = nullptr;
+ if ( !synapse_types ) {
+ if ( !(synapse_types = (const char**)malloc( (YT_LAST - YT_FIRST+1+1)*sizeof(char*))) )
+ abort();
+ size_t n, i;
+ for ( n = i = 0; n <= YT_LAST - YT_FIRST; n++ )
+ synapse_types[i++] = strdup( __CNUDT[YT_FIRST+n].family);
+ // there are fewer families than species, so we are wasting some tens of bytes here. oh well.
+ synapse_types[i] = nullptr;
+ }
+
+ static int list_index, len;
const char *name;
if ( !state ) {
- list_index = 0;
- len = strlen( text);
+ list_index = 0;
+ len = strlen( text);
}
while ( (name = synapse_types[list_index]) ) {
- list_index++;
- if ( strncmp( name, text, len) == 0 )
- return strdup( name);
+ list_index++;
+ if ( strncmp( name, text, len) == 0 )
+ return strdup( name);
}
return nullptr;
}
@@ -160,41 +160,41 @@ static int restrict_generated_set = 0;
static char*
cnrun_unit_label_generator( const char *text, int state)
{
- static int list_index, len;
+ static int list_index, len;
const char *name;
- static char** unit_labels = nullptr;
-
- if ( regenerate_unit_labels ) {
- regenerate_unit_labels = false;
-
- if ( !Model ) {
- free( unit_labels);
- unit_labels = nullptr;
- return nullptr;
- }
-
- if ( !(unit_labels = (char**)realloc( unit_labels, (Model->units()+1) * sizeof(char*))) )
- abort();
- size_t n = 0;
- for_model_units (Model, U)
- if ( ((restrict_generated_set & GENERATE_NEURONS) && (*U)->is_neuron()) ||
- ((restrict_generated_set & GENERATE_SYNAPSES) && (*U)->is_synapse()) )
- unit_labels[n++] = strdup( (*U) -> label());
- unit_labels[n] = nullptr;
- }
+ static char** unit_labels = nullptr;
+
+ if ( regenerate_unit_labels ) {
+ regenerate_unit_labels = false;
+
+ if ( !Model ) {
+ free( unit_labels);
+ unit_labels = nullptr;
+ return nullptr;
+ }
+
+ if ( !(unit_labels = (char**)realloc( unit_labels, (Model->units()+1) * sizeof(char*))) )
+ abort();
+ size_t n = 0;
+ for_model_units (Model, U)
+ if ( ((restrict_generated_set & GENERATE_NEURONS) && (*U)->is_neuron()) ||
+ ((restrict_generated_set & GENERATE_SYNAPSES) && (*U)->is_synapse()) )
+ unit_labels[n++] = strdup( (*U) -> label());
+ unit_labels[n] = nullptr;
+ }
- if ( !unit_labels )
- return nullptr;
+ if ( !unit_labels )
+ return nullptr;
if ( !state ) {
- list_index = 0;
- len = strlen( text);
+ list_index = 0;
+ len = strlen( text);
}
while ( (name = unit_labels[list_index]) ) {
- list_index++;
- if ( strncmp( name, text, len) == 0 )
- return strdup( name);
+ list_index++;
+ if ( strncmp( name, text, len) == 0 )
+ return strdup( name);
}
return nullptr;
}
@@ -206,36 +206,36 @@ bool cnrun::regenerate_var_names = true;
static char*
cnrun_var_names_generator( const char *text, int state)
{
- static int list_index, len;
+ static int list_index, len;
const char *name;
- static char** var_names = nullptr;
+ static char** var_names = nullptr;
- if ( regenerate_var_names ) {
- regenerate_var_names = false;
+ if ( regenerate_var_names ) {
+ regenerate_var_names = false;
- if ( current_shell_variables->size() == 0 )
- return nullptr;
+ if ( current_shell_variables->size() == 0 )
+ return nullptr;
- if ( !(var_names = (char**)realloc( var_names, (current_shell_variables->size()+1) * sizeof(char*))) )
- abort();
- size_t n = 0;
- for ( auto &v : *current_shell_variables )
- var_names[n++] = strdup( v.name);
- var_names[n] = nullptr;
- }
+ if ( !(var_names = (char**)realloc( var_names, (current_shell_variables->size()+1) * sizeof(char*))) )
+ abort();
+ size_t n = 0;
+ for ( auto &v : *current_shell_variables )
+ var_names[n++] = strdup( v.name);
+ var_names[n] = nullptr;
+ }
- if ( !var_names )
- return nullptr;
+ if ( !var_names )
+ return nullptr;
if ( !state ) {
- list_index = 0;
- len = strlen( text);
+ list_index = 0;
+ len = strlen( text);
}
while ( (name = var_names[list_index]) ) {
- list_index++;
- if ( strncmp( name, text, len) == 0 )
- return strdup( name);
+ list_index++;
+ if ( strncmp( name, text, len) == 0 )
+ return strdup( name);
}
return nullptr;
}
@@ -249,36 +249,36 @@ bool cnrun::regenerate_source_ids = true;
static char*
cnrun_source_id_generator( const char *text, int state)
{
- static int list_index, len;
+ static int list_index, len;
const char *name;
- static char** source_ids = nullptr;
+ static char** source_ids = nullptr;
- if ( regenerate_source_ids ) {
- regenerate_source_ids = false;
+ if ( regenerate_source_ids ) {
+ regenerate_source_ids = false;
- if ( !Model || Model->Sources.size() == 0 )
- return nullptr;
+ if ( !Model || Model->Sources.size() == 0 )
+ return nullptr;
- if ( !(source_ids = (char**)realloc( source_ids, (Model->Sources.size()+1) * sizeof(char*))) )
- abort();
- size_t n = 0;
- for ( auto &v : Model->Sources )
- source_ids[n++] = strdup( v->name.c_str());
- source_ids[n] = nullptr;
- }
+ if ( !(source_ids = (char**)realloc( source_ids, (Model->Sources.size()+1) * sizeof(char*))) )
+ abort();
+ size_t n = 0;
+ for ( auto &v : Model->Sources )
+ source_ids[n++] = strdup( v->name.c_str());
+ source_ids[n] = nullptr;
+ }
- if ( !source_ids )
- return nullptr;
+ if ( !source_ids )
+ return nullptr;
if ( !state ) {
- list_index = 0;
- len = strlen( text);
+ list_index = 0;
+ len = strlen( text);
}
while ( (name = source_ids[list_index]) ) {
- list_index++;
- if ( strncmp( name, text, len) == 0 )
- return strdup( name);
+ list_index++;
+ if ( strncmp( name, text, len) == 0 )
+ return strdup( name);
}
return nullptr;
}
@@ -293,45 +293,45 @@ static char *synapse_target_label_completing_for = nullptr;
static char*
cnrun_parm_names_generator( const char *text, int state)
{
- static int list_index, len;
+ static int list_index, len;
const char *name;
- if ( !Model )
- return nullptr;
- C_BaseSynapse *y;
- TUnitType t;
- C_BaseUnit *u1, *u2;
- if ( synapse_target_label_completing_for )
- if ( (u1 = Model->unit_by_label( unit_label_completing_for)) && u1->is_neuron() &&
- (u2 = Model->unit_by_label( synapse_target_label_completing_for)) && u2->is_neuron() &&
- (y = (static_cast<C_BaseNeuron*>(u1)) -> connects_via( *static_cast<C_BaseNeuron*>(u2))) )
- t = y->type();
- else
- return nullptr;
- else
- t = Model -> unit_by_label( unit_label_completing_for) -> type();
- if ( t == NT_VOID )
- return nullptr;
-
- if ( !(parm_names = (char**)realloc( parm_names, (__CNUDT[t].pno+1) * sizeof(char*))) )
- abort();
- size_t n, p;
- for ( n = p = 0; p < __CNUDT[t].pno; p++ )
- if ( __cn_verbosely > 5 || __CNUDT[t].stock_param_syms[p][0] != '.' )
- parm_names[n++] = strdup( __CNUDT[t].stock_param_syms[p]);
- parm_names[n] = nullptr;
-
- if ( !parm_names )
- return nullptr;
+ if ( !Model )
+ return nullptr;
+ C_BaseSynapse *y;
+ TUnitType t;
+ C_BaseUnit *u1, *u2;
+ if ( synapse_target_label_completing_for )
+ if ( (u1 = Model->unit_by_label( unit_label_completing_for)) && u1->is_neuron() &&
+ (u2 = Model->unit_by_label( synapse_target_label_completing_for)) && u2->is_neuron() &&
+ (y = (static_cast<C_BaseNeuron*>(u1)) -> connects_via( *static_cast<C_BaseNeuron*>(u2))) )
+ t = y->type();
+ else
+ return nullptr;
+ else
+ t = Model -> unit_by_label( unit_label_completing_for) -> type();
+ if ( t == NT_VOID )
+ return nullptr;
+
+ if ( !(parm_names = (char**)realloc( parm_names, (__CNUDT[t].pno+1) * sizeof(char*))) )
+ abort();
+ size_t n, p;
+ for ( n = p = 0; p < __CNUDT[t].pno; p++ )
+ if ( __cn_verbosely > 5 || __CNUDT[t].stock_param_syms[p][0] != '.' )
+ parm_names[n++] = strdup( __CNUDT[t].stock_param_syms[p]);
+ parm_names[n] = nullptr;
+
+ if ( !parm_names )
+ return nullptr;
if ( !state ) {
- list_index = 0;
- len = strlen( text);
+ list_index = 0;
+ len = strlen( text);
}
while ( (name = parm_names[list_index]) ) {
- list_index++;
- if ( strncmp( name, text, len) == 0 )
- return strdup( name);
+ list_index++;
+ if ( strncmp( name, text, len) == 0 )
+ return strdup( name);
}
return nullptr;
}
@@ -345,16 +345,16 @@ rl_point_at_word() __attribute__ ((pure));
static int
rl_point_at_word()
{
- int p = 0, delims = 0;
- while ( p < rl_point ) {
- if ( isspace(rl_line_buffer[p]) ) {
- delims++;
- do p++;
- while ( p < rl_point && isspace(rl_line_buffer[p]) );
- }
- p++;
- }
- return delims;
+ int p = 0, delims = 0;
+ while ( p < rl_point ) {
+ if ( isspace(rl_line_buffer[p]) ) {
+ delims++;
+ do p++;
+ while ( p < rl_point && isspace(rl_line_buffer[p]) );
+ }
+ p++;
+ }
+ return delims;
}
@@ -363,90 +363,94 @@ char**
cnrun::
cnrun_completion( const char *text, int start, int end)
{
- if ( start == 0 )
- return rl_completion_matches( text, &cnrun_cmd_generator);
-
- char *line_buffer = strdupa( rl_line_buffer),
- *cmd = strtok( line_buffer, " \t");
-
- if ( strcmp( cmd, cnrun_cmd[CNCMD_add_neuron]) == 0 ) {
- switch ( rl_point_at_word() ) {
- case 1: return rl_completion_matches( text, &cnrun_neu_type_generator);
- default: return rl_completion_matches( text, &cnrun_null_generator);
- }
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_add_synapse]) == 0 ) {
- switch ( rl_point_at_word() ) {
- case 1: return rl_completion_matches( text, &cnrun_syn_type_generator);
- case 2:
- case 3: return (restrict_generated_set = 0|GENERATE_NEURONS,
- rl_completion_matches( text, &cnrun_unit_label_generator));
- default: return rl_completion_matches( text, &cnrun_null_generator);
- }
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_load_nml]) == 0 ) {
- return nullptr; // use built-in filename completion
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_show_units]) == 0 ||
- strcmp( cmd, cnrun_cmd[CNCMD_decimate]) == 0 ||
- strcmp( cmd, cnrun_cmd[CNCMD_start_listen]) == 0 ||
- strcmp( cmd, cnrun_cmd[CNCMD_stop_listen]) == 0 ||
- strcmp( cmd, cnrun_cmd[CNCMD_start_log_spikes]) == 0 ||
- strcmp( cmd, cnrun_cmd[CNCMD_stop_log_spikes]) == 0 ||
- strcmp( cmd, cnrun_cmd[CNCMD_putout]) == 0 ) {
- return (rl_point_at_word() == 1) ? (restrict_generated_set = 0|GENERATE_NEURONS|GENERATE_SYNAPSES,
- rl_completion_matches( text, &cnrun_unit_label_generator)) : nullptr;
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_show_vars]) == 0 ||
- strcmp( cmd, cnrun_cmd[CNCMD_clear_vars]) == 0 ||
- strcmp( cmd, cnrun_cmd[CNCMD_listen_dt]) == 0 ||
- strcmp( cmd, cnrun_cmd[CNCMD_integration_dt_min]) == 0 ||
- strcmp( cmd, cnrun_cmd[CNCMD_integration_dt_max]) == 0 ||
- strcmp( cmd, cnrun_cmd[CNCMD_integration_dt_cap]) == 0 ) {
- return (rl_point_at_word() == 1) ? rl_completion_matches( text, cnrun_var_names_generator) : nullptr;
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_set_parm_neuron]) == 0 ) {
- switch ( rl_point_at_word() ) {
- case 1: restrict_generated_set = 0|GENERATE_NEURONS;
- return rl_completion_matches( text, cnrun_unit_label_generator);
- case 2: unit_label_completing_for = strtok( nullptr, " ");
- synapse_target_label_completing_for = nullptr;
- return rl_completion_matches( text, cnrun_parm_names_generator);
- default: return rl_completion_matches( text, cnrun_var_names_generator);
- }
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_set_parm_synapse]) == 0 ) {
- switch ( rl_point_at_word() ) {
- case 1:
- case 2: restrict_generated_set = 0|GENERATE_NEURONS;
- return rl_completion_matches( text, cnrun_unit_label_generator);
- case 3: unit_label_completing_for = strtok( nullptr, " ");
- synapse_target_label_completing_for = strtok( nullptr, " ");
- return rl_completion_matches( text, cnrun_parm_names_generator);
- default: return rl_completion_matches( text, cnrun_var_names_generator);
- }
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_connect_source]) == 0 ) {
- switch ( rl_point_at_word() ) {
- case 1: return rl_completion_matches( text, &cnrun_source_id_generator);
- case 2: restrict_generated_set = 0|GENERATE_NEURONS|GENERATE_SYNAPSES;
- return rl_completion_matches( text, &cnrun_unit_label_generator);
- case 3: unit_label_completing_for = (strtok( nullptr, " "), strtok( nullptr, " "));
- synapse_target_label_completing_for = nullptr;
- return rl_completion_matches( text, cnrun_parm_names_generator);
- default: return rl_completion_matches( text, cnrun_null_generator);
- }
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_new_source]) == 0 ) {
- switch ( rl_point_at_word() ) {
- case 1: return rl_completion_matches( text, cnrun_source_types_generator);
- default: return rl_completion_matches( text, cnrun_null_generator);
- }
-
- } else {
- return nullptr;
- }
+ if ( start == 0 )
+ return rl_completion_matches( text, &cnrun_cmd_generator);
+
+ char *line_buffer = strdupa( rl_line_buffer),
+ *cmd = strtok( line_buffer, " \t");
+
+ if ( strcmp( cmd, cnrun_cmd[CNCMD_add_neuron]) == 0 ) {
+ switch ( rl_point_at_word() ) {
+ case 1: return rl_completion_matches( text, &cnrun_neu_type_generator);
+ default: return rl_completion_matches( text, &cnrun_null_generator);
+ }
+
+ } else if ( strcmp( cmd, cnrun_cmd[CNCMD_add_synapse]) == 0 ) {
+ switch ( rl_point_at_word() ) {
+ case 1: return rl_completion_matches( text, &cnrun_syn_type_generator);
+ case 2:
+ case 3: return (restrict_generated_set = 0|GENERATE_NEURONS,
+ rl_completion_matches( text, &cnrun_unit_label_generator));
+ default: return rl_completion_matches( text, &cnrun_null_generator);
+ }
+
+ } else if ( strcmp( cmd, cnrun_cmd[CNCMD_load_nml]) == 0 ) {
+ return nullptr; // use built-in filename completion
+
+ } else if ( strcmp( cmd, cnrun_cmd[CNCMD_show_units]) == 0 ||
+ strcmp( cmd, cnrun_cmd[CNCMD_decimate]) == 0 ||
+ strcmp( cmd, cnrun_cmd[CNCMD_start_listen]) == 0 ||
+ strcmp( cmd, cnrun_cmd[CNCMD_stop_listen]) == 0 ||
+ strcmp( cmd, cnrun_cmd[CNCMD_start_log_spikes]) == 0 ||
+ strcmp( cmd, cnrun_cmd[CNCMD_stop_log_spikes]) == 0 ||
+ strcmp( cmd, cnrun_cmd[CNCMD_putout]) == 0 ) {
+ return (rl_point_at_word() == 1) ? (restrict_generated_set = 0|GENERATE_NEURONS|GENERATE_SYNAPSES,
+ rl_completion_matches( text, &cnrun_unit_label_generator)) : nullptr;
+
+ } else if ( strcmp( cmd, cnrun_cmd[CNCMD_show_vars]) == 0 ||
+ strcmp( cmd, cnrun_cmd[CNCMD_clear_vars]) == 0 ||
+ strcmp( cmd, cnrun_cmd[CNCMD_listen_dt]) == 0 ||
+ strcmp( cmd, cnrun_cmd[CNCMD_integration_dt_min]) == 0 ||
+ strcmp( cmd, cnrun_cmd[CNCMD_integration_dt_max]) == 0 ||
+ strcmp( cmd, cnrun_cmd[CNCMD_integration_dt_cap]) == 0 ) {
+ return (rl_point_at_word() == 1) ? rl_completion_matches( text, cnrun_var_names_generator) : nullptr;
+
+ } else if ( strcmp( cmd, cnrun_cmd[CNCMD_set_parm_neuron]) == 0 ) {
+ switch ( rl_point_at_word() ) {
+ case 1: restrict_generated_set = 0|GENERATE_NEURONS;
+ return rl_completion_matches( text, cnrun_unit_label_generator);
+ case 2: unit_label_completing_for = strtok( nullptr, " ");
+ synapse_target_label_completing_for = nullptr;
+ return rl_completion_matches( text, cnrun_parm_names_generator);
+ default: return rl_completion_matches( text, cnrun_var_names_generator);
+ }
+
+ } else if ( strcmp( cmd, cnrun_cmd[CNCMD_set_parm_synapse]) == 0 ) {
+ switch ( rl_point_at_word() ) {
+ case 1:
+ case 2: restrict_generated_set = 0|GENERATE_NEURONS;
+ return rl_completion_matches( text, cnrun_unit_label_generator);
+ case 3: unit_label_completing_for = strtok( nullptr, " ");
+ synapse_target_label_completing_for = strtok( nullptr, " ");
+ return rl_completion_matches( text, cnrun_parm_names_generator);
+ default: return rl_completion_matches( text, cnrun_var_names_generator);
+ }
+
+ } else if ( strcmp( cmd, cnrun_cmd[CNCMD_connect_source]) == 0 ) {
+ switch ( rl_point_at_word() ) {
+ case 1: return rl_completion_matches( text, &cnrun_source_id_generator);
+ case 2: restrict_generated_set = 0|GENERATE_NEURONS|GENERATE_SYNAPSES;
+ return rl_completion_matches( text, &cnrun_unit_label_generator);
+ case 3: unit_label_completing_for = (strtok( nullptr, " "), strtok( nullptr, " "));
+ synapse_target_label_completing_for = nullptr;
+ return rl_completion_matches( text, cnrun_parm_names_generator);
+ default: return rl_completion_matches( text, cnrun_null_generator);
+ }
+
+ } else if ( strcmp( cmd, cnrun_cmd[CNCMD_new_source]) == 0 ) {
+ switch ( rl_point_at_word() ) {
+ case 1: return rl_completion_matches( text, cnrun_source_types_generator);
+ default: return rl_completion_matches( text, cnrun_null_generator);
+ }
+
+ } else {
+ return nullptr;
+ }
}
-
-// EOF
+// Local Variables:
+// Mode: c++
+// indent-tabs-mode: nil
+// tab-width: 8
+// c-basic-offset: 8
+// End:
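
The interpreter.cc hunk that follows pairs each cmd_*() method with an argument-signature string in a Commands[] table ('s' for a string, 'g' for a double, 'b' apparently a boolean flag carried in the integer slot, SArg type 'd'). The excerpt is cut before the Lua-facing dispatcher itself (the host_fun() referred to in a comment in commands.cc), so the sketch below of turning a Lua call stack into a vector<SArg> according to such a signature is an assumption built on the standard Lua C API, not code from the commit; the local SArg only mirrors the real one declared in cnrun.hh:

    #include <cstring>
    #include <string>
    #include <vector>
    extern "C" {
    #include <lua.h>
    #include <lauxlib.h>
    }

    // local stand-in mirroring cnrun::CInterpreterShell::SArg
    struct SArg {
            char type; double vg; int vd; std::string vs;
            explicit SArg (double v) : type ('g'), vg (v) {}
            explicit SArg (int v) : type ('d'), vd (v) {}
            explicit SArg (const std::string& v) : type ('s'), vs (v) {}
    };

    // Collect the arguments of a Lua call into SArg values, per a signature
    // string such as "sg" (model name, then a double). Returns false on an
    // arity mismatch; the luaL_check* calls raise a Lua error themselves on
    // a type mismatch.
    static bool
    collect_args( lua_State *L, const char *sig, std::vector<SArg>& args)
    {
            if ( lua_gettop( L) != (int)std::strlen( sig) )
                    return false;
            for ( int i = 0; sig[i]; ++i )
                    switch ( sig[i] ) {
                    case 's': args.emplace_back( std::string (luaL_checkstring( L, i+1)));  break;
                    case 'g': args.emplace_back( (double)luaL_checknumber( L, i+1));        break;
                    case 'd':
                    case 'b': args.emplace_back( (int)luaL_checkinteger( L, i+1));          break;
                    default:  return false;
                    }
            return true;
    }
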
diff --git a/upstream/src/cnrun/interpreter.cc b/upstream/src/cnrun/interpreter.cc
index 323c59e..279d8d8 100644
--- a/upstream/src/cnrun/interpreter.cc
+++ b/upstream/src/cnrun/interpreter.cc
@@ -1,836 +1,273 @@
/*
- * Author: Andrei Zavada <johnhommer at gmail.com>
+ * File name: cnrun/interpreter.cc
+ * Project: cnrun
+ * Author: Andrei Zavada <johnhommer at gmail.com>
+ * building on original work by Thomas Nowotny <tnowotny at ucsd.edu>
+ * Initial version: 2010-02-12
*
- * License: GPL-2+
+ * Purpose: CModel runner, using Lua.
*
- * Initial version: 2010-02-12
- *
- * CNModel runner (interpreter)
+ * License: GPL
*/
-
-#include <sys/stat.h>
-#include <stdio.h>
-#include <unistd.h>
-#include <cassert>
-#include <regex.h>
-#include <list>
-
#if HAVE_CONFIG_H && !defined(VERSION)
# include "config.h"
#endif
-#if defined(HAVE_READLINE_READLINE_H)
-# include <readline/readline.h>
-#elif defined(HAVE_READLINE_H)
-# include <readline.h>
-#endif
+#include <list>
-#if defined(HAVE_READLINE_HISTORY_H)
-# include <readline/history.h>
-#elif defined(HAVE_HISTORY_H)
-# include <history.h>
-#endif
+extern "C" {
+#include <lua.h>
+#include <lualib.h>
+#include <lauxlib.h>
+}
-#include "libstilton/string.hh"
-#include "libstilton/exprparser.hh"
-#include "libcn/integrate-rk65.hh"
-#include "libcn/base-unit.hh"
-#include "runner.hh"
+#include "cnrun.hh"
using namespace std;
using namespace cnrun;
-const char* const cnrun::cnrun_cmd[] = {
- "new_model",
- "load_nml",
- "merge_nml",
- "add_neuron",
- "add_synapse",
- "reset",
- "reset_revert_params",
- "reset_state_units",
- "advance_until",
- "advance",
- "putout",
- "decimate",
- "start_listen",
- "stop_listen",
- "listen_dt",
- "listen_mode",
- "integration_dt_min",
- "integration_dt_max",
- "integration_dt_cap",
- "start_log_spikes",
- "stop_log_spikes",
- "sxf_params",
- "new_source",
- "show_sources",
- "connect_source",
- "disconnect_source",
- "set_parm_neuron",
- "set_parm_synapse",
- "cull_deaf_synapses",
- "describe_model",
- "show_units",
- "exec",
- "verbosity",
- "show_vars",
- "clear_vars",
- "pause",
- "quit",
- nullptr
-};
-
-
-
-list<cnrun::stilton::SVariable>
- *cnrun::CInterpreter::current_env;
-
-
-
-namespace {
-
-void
-report_script_issue( const char *fname, int lineno, int vrb, const char* fmt, ...)
+cnrun::CInterpreterShell::
+CInterpreterShell (const SInterpOptions& options_)
+ : options (options_)
{
- using namespace stilton::str;
- va_list ap;
- va_start (ap, fmt);
- string body = svasprintf( fmt, ap);
- va_end (ap);
-
- string pre = ( lineno > 0 )
- ? sasprintf( "%s:%d: %s", fname, lineno, body.c_str())
- : sasprintf( "%s: %s", fname, body.c_str());
+ lua_state = luaL_newstate();
+ luaL_openlibs( lua_state);
}
-int do_single_cmd( const char*,
- list<stilton::SVariable> &varlist,
- int level = 0, const char *fname = "",
- unsigned lineno = -1);
-
+cnrun::CInterpreterShell::
+~CInterpreterShell ()
+{
+ for ( auto& M : models )
+ delete M.second;
+ lua_close( lua_state);
+}
-#define CN_INTERP_EXIT 1
-#define CN_INTERP_WARN -1
-#define CN_INTERP_PARSEERROR -2
-#define CN_INTERP_SYSFAIL -3
+namespace {
-#define CNRUN_HISTORY ".cnrun-history"
+struct SCmdDesc {
+ const char* id;
+ //enum class TAType { aint, afloat, astr, };
+ //vector<TAType> arguments;
+ const char* arg_sig;
+ //CInterpreterShell::TCmdResult (CInterpreterShell::* fun)(CInterpreterShell::TArgs&);
+ decltype(&CInterpreterShell::cmd_new_model) fun;
+};
+const SCmdDesc Commands[] = {
+ { "new_model", "s", &CInterpreterShell::cmd_new_model },
+ { "delete_model", "s", &CInterpreterShell::cmd_delete_model },
+ { "import_nml", "ss", &CInterpreterShell::cmd_import_nml },
+ { "export_nml", "ss", &CInterpreterShell::cmd_export_nml },
+ { "reset_model", "s", &CInterpreterShell::cmd_reset_model },
+ { "cull_deaf_synapses", "s", &CInterpreterShell::cmd_cull_deaf_synapses },
+ { "describe_model", "s", &CInterpreterShell::cmd_describe_model },
+ { "get_model_parameter", "ss", &CInterpreterShell::cmd_get_model_parameter },
+ { "set_model_parameter", "sss", &CInterpreterShell::cmd_set_model_parameter },
+ { "advance", "sg", &CInterpreterShell::cmd_advance },
+ { "advance_until", "sg", &CInterpreterShell::cmd_advance_until },
+
+ { "new_neuron", "sss", &CInterpreterShell::cmd_new_neuron },
+ { "new_synapse", "ssssg", &CInterpreterShell::cmd_new_synapse },
+ { "get_unit_properties", "ss", &CInterpreterShell::cmd_get_unit_properties },
+ { "get_unit_parameter", "sss", &CInterpreterShell::cmd_get_unit_parameter },
+ { "set_unit_parameter", "sssg", &CInterpreterShell::cmd_set_unit_parameter },
+ { "get_unit_vars", "ss", &CInterpreterShell::cmd_get_unit_vars },
+ { "reset_unit", "ss", &CInterpreterShell::cmd_reset_unit },
+
+ { "get_units_matching", "ss", &CInterpreterShell::cmd_get_units_matching },
+ { "get_units_of_type", "ss", &CInterpreterShell::cmd_get_units_of_type },
+ { "set_matching_neuron_parameter", "sssg", &CInterpreterShell::cmd_set_matching_neuron_parameter },
+ { "set_matching_synapse_parameter", "ssssg", &CInterpreterShell::cmd_set_matching_synapse_parameter },
+ { "revert_matching_unit_parameters", "ss", &CInterpreterShell::cmd_revert_matching_unit_parameters },
+ { "decimate", "ssg", &CInterpreterShell::cmd_decimate },
+ { "putout", "ss", &CInterpreterShell::cmd_putout },
+
+ { "new_tape_source", "sssb", &CInterpreterShell::cmd_new_tape_source },
+ { "new_periodic_source", "sssbg", &CInterpreterShell::cmd_new_periodic_source },
+ { "new_noise_source", "ssgggs", &CInterpreterShell::cmd_new_noise_source },
+ { "get_sources", "s", &CInterpreterShell::cmd_get_sources },
+ { "connect_source", "ssss", &CInterpreterShell::cmd_connect_source },
+ { "disconnect_source", "ssss", &CInterpreterShell::cmd_disconnect_source },
+
+ { "start_listen", "ss", &CInterpreterShell::cmd_start_listen },
+ { "stop_listen", "ss", &CInterpreterShell::cmd_stop_listen },
+ { "start_log_spikes", "ss", &CInterpreterShell::cmd_start_log_spikes },
+ { "stop_log_spikes", "ss", &CInterpreterShell::cmd_stop_log_spikes },
+};
+}
-int
-cnrun::CInterpreter::
-new_model( const string& model_name, const string& fname, size_t lineno)
+list<string>
+cnrun::CInterpreterShell::
+list_commands()
{
- if ( model )
- delete model;
-
- model = new CModel(
- model_name,
- new CIntegrateRK65(
- Options.integration_dt_min,
- Options.integration_dt_max,
- Options.integration_dt_max_cap),
- Options);
- if ( !model ) {
- report_script_issue( fname, lineno, -1, "Failed to create model");
- return CN_INTERP_SYSFAIL;
- }
-
- model->vp( 3,
- "generator type: %s\n"
- " seed = %lu\n"
- " first value = %lu\n",
- gsl_rng_name (model->_rng),
- gsl_rng_default_seed,
- gsl_rng_get (model->_rng));
-
- return 0;
+ list<string> ret;
+ for ( auto& cs : Commands )
+ ret.push_back( {cs.id});
+ return ret;
}
-
-
-
-
+namespace {
+extern "C"
int
-do_single_cmd( const char* raw,
- list<SVariable> &varlist,
- int level, const char *fname, unsigned lineno)
+host_fun( lua_State* L) // -> nargsout
{
- string raw_s( raw);
- char *cmd = strtok( &raw_s[0], " \t"),
- *operand = strtok( nullptr, "\n");
-
- CExpression expr;
- double result;
-
-#define CHECK_MODEL \
- if ( !Model ) { \
- report_script_issue( fname, lineno, -1, "No model loaded"); \
- return CN_INTERP_WARN; \
+ auto reperr = [&L] (const char* str)
+ {
+ lua_pushboolean( L, false);
+ lua_pushstring( L, str); // str is a ready-made message, not a format string
+ };
+
+ size_t nargsin = lua_gettop(L) - 2; // the first two being the CInterpreterShell* and the opcode
+
+ auto this_p = (CInterpreterShell*)lua_touserdata( L, 1);
+ if ( !this_p ) {
+ reperr( "Opaque shell blob object is NULL");
+ return 2;
}
+ const char* opcode = lua_tostring( L, 2);
- if ( strcmp( cmd, cnrun_cmd[CNCMD_new_model]) == 0 ) {
- if ( !operand ) {
- report_script_issue( fname, lineno, -1, "Missing a name for the new model");
- return CN_INTERP_PARSEERROR;
- }
- delete Model;
-
- regenerate_unit_labels = true;
- return new_model( operand, fname, lineno);
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_load_nml]) == 0 ) {
- struct stat s;
- if ( stat( operand, &s) ) {
- report_script_issue( fname, lineno, -1, "No such file: \"%s\"", operand);
- return CN_INTERP_SYSFAIL;
- }
-
- int retval = new_model( operand, fname, lineno);
- if ( retval )
- return retval;
-
- if ( Model->import_NetworkML( operand, false) < 0 ) {
- report_script_issue( fname, lineno, -1, "Failed to create model topology from \"%s\"", operand);
- delete Model;
- Model = nullptr;
- return CN_INTERP_SYSFAIL;
- }
-
- Model -> cull_blind_synapses();
- regenerate_unit_labels = true;
-
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_merge_nml]) == 0 ) {
- CHECK_MODEL;
- struct stat s;
- if ( stat( operand, &s) ) {
- report_script_issue( fname, lineno, -1, "No such file: \"%s\"", operand);
- return CN_INTERP_SYSFAIL;
- }
- if ( Model->import_NetworkML( operand, true) < 0 ) {
- report_script_issue( fname, lineno, -1, "Failed to import topology from \"%s\"", operand);
- return CN_INTERP_SYSFAIL;
- }
-
- regenerate_unit_labels = true;
-
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_add_neuron]) == 0 ) {
- CHECK_MODEL;
- char *type_s, *label_s;
- if ( !operand ||
- !(type_s = (strtok( operand, " \t"))) ||
- !(label_s = strtok( nullptr, "\n")) ) {
- report_script_issue( fname, lineno, -1, "Missing neuron type and/or label in `add_neuron'");
- return CN_INTERP_PARSEERROR;
- }
- if ( !Model->add_neuron_species( type_s, label_s, true) ) {
- report_script_issue( fname, lineno, -1, "`add_neuron' failed");
- return CN_INTERP_PARSEERROR;
- }
- regenerate_unit_labels = true;
-
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_add_synapse]) == 0 ) {
- CHECK_MODEL;
- char *type_s, *src_s, *tgt_s, *g_s;
- if ( !operand ||
- !(type_s = (strtok( operand, " \t"))) ||
- !(src_s = strtok( nullptr, " \t")) ||
- !(tgt_s = strtok( nullptr, " \t")) ||
- !(g_s = strtok( nullptr, "\n")) ) {
- report_script_issue( fname, lineno, -1, "Missing synapse type, source or target label, and/or gsyn in `add_synapse'");
- return CN_INTERP_PARSEERROR;
- }
- double g;
- if ( expr( g_s, g, &varlist) ) {
- report_script_issue( fname, lineno, -1, "Bad value for gsyn in `add_synapse'");
- return CN_INTERP_PARSEERROR;
- }
-
- if ( !Model->add_synapse_species( type_s, src_s, tgt_s, g, true, true) ) {
- report_script_issue( fname, lineno, -1, "`add_synapse' failed (reason given above)", operand);
- return CN_INTERP_SYSFAIL;
- }
- regenerate_unit_labels = true;
-
-
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_reset]) == 0 ) {
- CHECK_MODEL;
- Model->reset();
- lprintf( 0, "Reset model and state of all units");
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_reset_revert_params]) == 0 ) {
- CHECK_MODEL;
- Model->reset( true);
- lprintf( 0, "Reset model and reverted all units' state and parameters");
+ size_t argth = 2; // stack: 1 = shell, 2 = opcode; command args start at 3
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_reset_state_units]) == 0 ) {
- CHECK_MODEL;
- if ( !operand )
- operand = const_cast<char*>(".*");
- regex_t RE;
- if ( 0 != regcomp( &RE, operand, REG_EXTENDED | REG_NOSUB) ) {
- report_script_issue( fname, lineno, -1, "Invalid regexp for `reset_state_units' arg");
- return CN_INTERP_PARSEERROR;
- }
- size_t cnt = 0;
- for_model_units (Model,U)
- if ( regexec( &RE, (*U)->label(), 0, 0, 0) == 0 ) {
- (*U) -> reset_state();
- ++cnt;
+ for ( auto& C : Commands )
+ if ( strcmp( opcode, C.id) == 0 ) {
+ if ( nargsin != strlen(C.arg_sig) ) {
+ reperr( stilton::str::sasprintf(
+ "Bad arity in call to %s (expecting %zu arg(s), got %zu)",
+ opcode, strlen(C.arg_sig), nargsin).c_str());
+ return 2;
}
- if ( cnt )
- lprintf( 0, "Reset %zd unit(s)", cnt);
-
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_advance_until]) == 0 ) {
- CHECK_MODEL;
- expr.silent = true;
- if ( !operand || expr( operand, result, &varlist) ) {
- report_script_issue( fname, lineno, -1, "No or bad time value for `advance_until'");
- return CN_INTERP_PARSEERROR;
- }
- if ( Model->model_time() > result ) {
- report_script_issue( fname, lineno, 0, "Cannot go back in time (now is %g)", Model->model_time());
- return CN_INTERP_WARN;
- }
-
- Model -> advance( result - Model->model_time());
- for_model_spikelogging_neurons (Model,N)
- (*N)->sync_spikelogging_history();
-
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_advance]) == 0 ) {
- CHECK_MODEL;
- if ( !operand || expr( operand, result, &varlist) ) {
- report_script_issue( fname, lineno, -1, "No or bad time value for `advance'");
- return CN_INTERP_PARSEERROR;
- }
-
- Model -> advance( result);
- for_model_spikelogging_neurons (Model,N)
- (*N)->sync_spikelogging_history();
-
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_putout]) == 0 ) {
- CHECK_MODEL;
- char *label_s;
- if ( !operand ||
- !(label_s = (strtok( operand, " \t"))) ) {
- report_script_issue( fname, lineno, -1, "Missing label in `putout'");
- return CN_INTERP_PARSEERROR;
- }
-
- list<CModel::STagGroup> tags;
- tags.push_back( CModel::STagGroup (label_s));
- Model->process_putout_tags( tags);
-
- regenerate_unit_labels = true;
-
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_decimate]) == 0 ) {
- CHECK_MODEL;
- char *label_s, *frac_s;
- if ( !operand ||
- !(label_s = (strtok( operand, " \t"))) ||
- !(frac_s = (strtok( nullptr, "\n"))) ) {
- report_script_issue( fname, lineno, -1, "Missing fraction or label in `decimate'");
- return CN_INTERP_PARSEERROR;
- }
- if ( expr( frac_s, result, &varlist) ) {
- report_script_issue( fname, lineno, -1, "Unparsable expression for decimation fraction: \"%s\"", operand);
- return CN_INTERP_PARSEERROR;
- }
- if ( result < 0. || result > 1. ) {
- report_script_issue( fname, lineno, -1, "Decimation fraction outside [0..1]");
- return CN_INTERP_PARSEERROR;
- }
-
- list<CModel::STagGroupDecimate> tags;
- tags.push_back( CModel::STagGroupDecimate( label_s, result));
- Model -> process_decimate_tags( tags);
-
- regenerate_unit_labels = true;
-
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_start_listen]) == 0 ) {
- CHECK_MODEL;
- if ( !operand ||
- !(operand = (strtok( operand, " \t")) ) ) {
- report_script_issue( fname, lineno, -1, "Missing label in `start_listen'");
- return CN_INTERP_PARSEERROR;
- }
- list<CModel::STagGroupListener> tags;
- tags.push_back( CModel::STagGroupListener (operand, true, 0
- | (Options.listen_1varonly ? CN_ULISTENING_1VARONLY : 0)
- | (Options.listen_deferwrite ? CN_ULISTENING_DEFERWRITE : 0)
- | (Options.listen_binary ? CN_ULISTENING_BINARY : CN_ULISTENING_DISK)));
- Model->process_listener_tags( tags);
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_stop_listen]) == 0 ) {
- CHECK_MODEL;
- if ( !operand ||
- !(operand = (strtok( operand, " \t"))) ) {
- report_script_issue( fname, lineno, -1, "Missing label in `stop_listen'");
- return CN_INTERP_PARSEERROR;
- }
- list<CModel::STagGroupListener> tags;
- tags.push_back( CModel::STagGroupListener (operand, false));
- Model->process_listener_tags( tags);
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_listen_dt]) == 0 ) {
- if ( !operand ) {
- lprintf( 0, "listen_dt is %g", Options.listen_dt);
- return 0;
- }
- if ( expr( operand, result, &varlist) ) {
- report_script_issue( fname, lineno, -1, "Unparsable expression for value in `listen_dt'");
- return CN_INTERP_PARSEERROR;
- }
- if ( Model )
- Model->listen_dt = Options.listen_dt = result;
-
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_listen_mode]) == 0 ) {
- if ( !operand )
- lprintf( 0, "listen mode is 1%sd%sb%s (%s%s%s)",
- Options.listen_1varonly ? "+" : "",
- Options.listen_deferwrite ? "+" : "",
- Options.listen_binary ? "+" : "",
- Options.listen_1varonly ? "one var-only, " : "all vars, ",
- Options.listen_deferwrite ? "deferred write, " : "continuous write, ",
- Options.listen_binary ? "binary" : "ascii");
- else {
- char *c;
- if ( (c = strchr( operand, '1')) ) Options.listen_1varonly = (*(c+1) != '-');
- if ( (c = strchr( operand, 'd')) ) Options.listen_deferwrite = (*(c+1) != '-');
- if ( (c = strchr( operand, 'b')) ) Options.listen_binary = (*(c+1) != '-');
- }
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_integration_dt_min]) == 0 ) {
- if ( !operand ) {
- lprintf( 0, "integration_dt_min is %g", Options.integration_dt_min);
- return 0;
- }
- if ( expr( operand, result, &varlist) ) {
- report_script_issue( fname, lineno, -1, "Unparsable expression for value in `integration_dt_min'");
- return CN_INTERP_PARSEERROR;
- }
- Options.integration_dt_min = result;
- if ( Model )
- Model->dt_min() = result;
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_integration_dt_max]) == 0 ) {
- if ( !operand ) {
- lprintf( 0, "integration_dt_max is %g", Options.integration_dt_max);
- return 0;
- }
- if ( expr( operand, result, &varlist) ) {
- report_script_issue( fname, lineno, -1, "Unparsable expression for value in `integration_dt_max'");
- return CN_INTERP_PARSEERROR;
- }
- Options.integration_dt_max = result;
- if ( Model )
- Model->dt_max() = result;
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_integration_dt_cap]) == 0 ) {
- if ( !operand ) {
- lprintf( 0, "integration_dt_cap is %g", Options.integration_dt_max_cap);
- return 0;
- }
- if ( expr( operand, result, &varlist) ) {
- report_script_issue( fname, lineno, -1, "Unparsable expression for value in `integration_dt_cap'");
- return CN_INTERP_PARSEERROR;
- }
- Options.integration_dt_max_cap = result;
- if ( Model )
- (static_cast<CIntegrateRK65*>(Model->_integrator)) -> _dt_max_cap = result;
-
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_set_sxf_params]) == 0 ) {
- if ( !operand ) {
- lprintf( 0, "sxf_start_delay:sxf_period:sdf_sigma is %g:%g:%g",
- Options.sxf_start_delay, Options.sxf_sample, Options.sdf_sigma);
- return 0;
- }
- if ( sscanf( operand, "%g:%g:%g",
- &Options.sxf_start_delay, &Options.sxf_sample, &Options.sdf_sigma) < 3 ) {
- report_script_issue( fname, lineno, -1, "Expecting <double>:<double>:<double> with set_sxf_params");
- return CN_INTERP_PARSEERROR;
- }
-
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_start_log_spikes]) == 0 ) {
- CHECK_MODEL;
- char *label_s;
- if ( !operand ||
- !(label_s = (strtok( operand, " \t"))) ) {
- report_script_issue( fname, lineno, -1, "Missing label in `start_log_spikes'");
- return CN_INTERP_PARSEERROR;
- }
- if ( Options.sxf_sample <= 0. || Options.sdf_sigma <= 0. ) {
- report_script_issue( fname, lineno, 1, "SDF parameters not set up, will only log spike times");
- }
- list<CModel::STagGroupSpikelogger> specs;
- specs.push_back( CModel::STagGroupSpikelogger (label_s, true,
- Options.sxf_sample, Options.sdf_sigma, Options.sxf_start_delay));
- Model->process_spikelogger_tags( specs);
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_stop_log_spikes]) == 0 ) {
- CHECK_MODEL;
- char *label_s;
- if ( !operand ||
- !(label_s = (strtok( operand, " \t"))) ) {
- report_script_issue( fname, lineno, -1, "Missing label in `stop_log_spikes'");
- return CN_INTERP_PARSEERROR;
- }
- list<CModel::STagGroupSpikelogger> specs;
- specs.push_back( CModel::STagGroupSpikelogger (label_s, false));
- Model->process_spikelogger_tags( specs);
-
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_set_parm_neuron]) == 0 ) {
- CHECK_MODEL;
- char *label_s, *parm_s, *value_s;
- if ( !operand ||
- !(label_s = (strtok( operand, " \t"))) ||
- !(parm_s = strtok( nullptr, " \t")) ||
- !(value_s = strtok( nullptr, "\n")) ) {
- report_script_issue( fname, lineno, -1, "Missing label, parameter or value in `set_parm_neuron'");
- return CN_INTERP_PARSEERROR;
- }
- if ( expr( value_s, result, &varlist) ) {
- report_script_issue( fname, lineno, -1, "Unparsable expression for value in `set_parm_neuron'");
- return CN_INTERP_PARSEERROR;
- }
- list<CModel::STagGroupNeuronParmSet> specs = { CModel::STagGroupNeuronParmSet (label_s, true, parm_s, result) };
- Model->process_paramset_static_tags( specs);
-
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_set_parm_synapse]) == 0 ) {
- CHECK_MODEL;
- char *src_s, *tgt_s, *parm_s, *value_s;
- if ( !operand ||
- !(src_s = (strtok( operand, " \t"))) ||
- !(tgt_s = (strtok( nullptr, " \t"))) ||
- !(parm_s = strtok( nullptr, " \t")) ||
- !(value_s = strtok( nullptr, "\n")) ) {
- report_script_issue( fname, lineno, -1, "Missing source or target label, parameter and/or value in `set_parm_synapse'");
- return CN_INTERP_PARSEERROR;
- }
- if ( expr( value_s, result, &varlist) ) {
- report_script_issue( fname, lineno, -1, "Unparsable value in `set_parm_synapse'");
- return CN_INTERP_PARSEERROR;
- }
- list<CModel::STagGroupSynapseParmSet> specs = { CModel::STagGroupSynapseParmSet (src_s, tgt_s, true, parm_s, result) };
- Model->process_paramset_static_tags( specs);
-
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_new_source]) == 0 ) {
- CHECK_MODEL;
- char *type_s, *name_s;
- if ( !operand ||
- !(type_s = (strtok( operand, " \t"))) ||
- !(name_s = (strtok( nullptr, " \t"))) ) {
- report_script_issue( fname, lineno, -1, "Missing source type or name in `new_source'");
- return CN_INTERP_PARSEERROR;
- }
-
- if ( Model->source_by_id( name_s) ) {
- report_script_issue( fname, lineno, -1, "A source named \"%s\" already exists", name_s);
- return CN_INTERP_PARSEERROR;
- }
-
- char *arg1, *arg2;
- if ( strcmp( type_s, __SourceTypes[SRC_TAPE]) == 0 ) {
- if ( !(arg1 = strtok( nullptr, "\n")) ) {
- report_script_issue( fname, lineno, -1, "Missing filename for a Tape source in `new_source'");
- return CN_INTERP_PARSEERROR;
- } else {
- CSourceTape *source = new CSourceTape( name_s, arg1);
- if ( source && source->name.size() )
- if ( count( Model->Sources.begin(), Model->Sources.end(), source) == 0 )
- Model->Sources.push_back( source);
- else {
- report_script_issue( fname, lineno, -1, "Duplicate name (\"%s\") for a source", arg1);
- return CN_INTERP_SYSFAIL;
- }
- else {
- delete source;
- report_script_issue( fname, lineno, -1, "Failed to set up a Tape source from \"%s\"", arg1);
- return CN_INTERP_SYSFAIL;
+ // we don't accept arrays from lua yet
+ CInterpreterShell::TArgs args;
+ while ( ++argth <= 2 + nargsin ) {
+ CInterpreterShell::SArg A (0);
+ A.type = C.arg_sig[argth-3];
+ switch ( A.type ) {
+ case 's': A.vs = lua_tostring( L, argth); break;
+ case 'd': A.vd = lua_tointeger( L, argth); break;
+ case 'b': A.vd = lua_tointeger( L, argth); break;
+ case 'g': A.vg = lua_tonumber( L, argth); break;
+ default:
+ throw "Fix type literals in SCmdDesc?";
 }
+ args.push_back( A); // assuming TArgs is a vector/list of SArg
 }
- } else if ( strcmp( type_s, __SourceTypes[SRC_PERIODIC]) == 0 ) {
- if ( !(arg1 = strtok( nullptr, "\n")) ) {
- report_script_issue( fname, lineno, -1, "Missing filename for a Periodic source in `new_source'");
- return CN_INTERP_PARSEERROR;
- } else {
- CSourcePeriodic *source = new CSourcePeriodic( name_s, arg1);
- if ( source && source->name.size() )
- if ( count( Model->Sources.begin(), Model->Sources.end(), source) == 0 )
- Model->Sources.push_back( source);
- else {
- report_script_issue( fname, lineno, -1, "Duplicate name (\"%s\") for a source", arg1);
- return CN_INTERP_SYSFAIL;
+
+ // return: ok flag, result code, # of values pushed, value0, value1, ...; or
+ // ok flag, non-ok result code, error string
+ auto R = (this_p ->* C.fun)( args);
+ lua_settop( L, 0);
+ lua_pushboolean( L, true);
+ lua_pushinteger( L, R.result);
+ if ( R.result == CInterpreterShell::TCmdResult::ok ) {
+ lua_pushinteger( L, R.values.size());
+ for ( auto& V : R.values )
+ switch (V.type) {
+ case 's': lua_pushstring( L, V.vs.c_str()); break;
+ case 'd': lua_pushinteger( L, V.vd); break;
+ case 'g': lua_pushnumber( L, V.vg); break;
+ default:
+ throw "Fix type literals in SCmdDesc?";
}
- else {
- delete source;
- report_script_issue( fname, lineno, -1, "Failed to set up a Periodic source from \"%s\"", arg1);
- return CN_INTERP_SYSFAIL;
- }
- }
- } else if ( strcmp( type_s, __SourceTypes[SRC_NOISE]) == 0 ) {
- if ( !(arg1 = strtok( nullptr, ":")) ||
- !(arg2 = strtok( nullptr, "\n")) ) {
- report_script_issue( fname, lineno, -1, "Incomplete min:max set for a Noise source in `new_source'");
- return CN_INTERP_PARSEERROR;
+ return 1 + 1 + 1 + R.values.size();
} else {
- double _min, _max;
- if ( expr( arg1, _min, &varlist) ||
- expr( arg2, _max, &varlist) ) {
- report_script_issue( fname, lineno, -1, "Bad min:max values for a Noise source");
- return CN_INTERP_PARSEERROR;
- }
- CSourceNoise *source = new CSourceNoise( name_s, _min, _max);
- if ( source && source->name.size() ) {
- Model->Sources.push_back( source);
- } else {
- delete source;
- report_script_issue( fname, lineno, -1, "Failed to set up a Noise source");
- return CN_INTERP_SYSFAIL;
- }
+ lua_pushstring( L, R.error_message.c_str());
+ return 1 + 1 + 1;
}
- } else if ( strcmp( type_s, __SourceTypes[SRC_FUNCTION]) == 0 ) {
- report_script_issue( fname, lineno, -1, "Go code, Chris!");
- return CN_INTERP_SYSFAIL;
- } else {
- report_script_issue( fname, lineno, -1, "Unrecognised source type in `new_source'");
- return CN_INTERP_PARSEERROR;
}
+ reperr( stilton::str::sasprintf(
+ "Unrecognized function \"%s\"/%zu",
+ opcode, nargsin).c_str());
+ return 2;
+}
+}
- regenerate_source_ids = true;
-
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_show_sources]) == 0 ) {
- CHECK_MODEL;
- for ( list<C_BaseSource*>::iterator S = Model->Sources.begin(); S != Model->Sources.end(); S++ )
- (*S)->dump();
-
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_connect_source]) == 0 ) {
- CHECK_MODEL;
- char *label_s, *parm_s, *source_s;
- if ( !operand ||
- !(source_s = strtok( operand, " \t")) ||
- !(label_s = strtok( nullptr, " \t")) ||
- !(parm_s = strtok( nullptr, "\n")) ) {
- report_script_issue( fname, lineno, -1, "Missing source id, unit label and/or parameter in `connect_source'");
- return CN_INTERP_PARSEERROR;
- }
- C_BaseSource *source = Model->source_by_id( source_s);
- if ( !source ) {
- report_script_issue( fname, lineno, -1, "Unknown source \"%s\"", source_s);
- return CN_INTERP_PARSEERROR;
- }
-
- list<CModel::STagGroupSource> tags;
- tags.push_back( CModel::STagGroupSource (label_s, true, parm_s, source));
- Model->process_paramset_source_tags( tags);
-
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_disconnect_source]) == 0 ) {
- CHECK_MODEL;
- char *label_s, *parm_s, *source_s;
- if ( !operand ||
- !(label_s = (strtok( operand, " \t"))) ||
- !(parm_s = strtok( nullptr, " \t")) ||
- !(source_s = strtok( nullptr, "\n")) ) {
- report_script_issue( fname, lineno, -1, "Missing label, parameter or source in `disconnect_source'");
- return CN_INTERP_PARSEERROR;
- }
- C_BaseSource *source = Model->source_by_id( source_s);
- if ( !source ) {
- report_script_issue( fname, lineno, -1, "Unknown source \"%s\"", source_s);
- return CN_INTERP_PARSEERROR;
- }
-
- list<CModel::STagGroupSource> specs;
- specs.push_back( CModel::STagGroupSource (label_s, false, parm_s, source));
- Model->process_paramset_source_tags( specs);
-
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_cull_deaf_synapses]) == 0 ) {
- CHECK_MODEL;
- Model->cull_deaf_synapses();
-
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_describe_model]) == 0 ) {
- CHECK_MODEL;
- Model->dump_metrics();
- Model->dump_units();
- Model->dump_state();
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_show_units]) == 0 ) {
- CHECK_MODEL;
- if ( !operand )
- operand = const_cast<char*>(".*");
-
- regex_t RE;
- if ( 0 != regcomp( &RE, operand, REG_EXTENDED | REG_NOSUB) ) {
- report_script_issue( fname, lineno, -1, "Invalid regexp for `show_units' arg");
- return CN_INTERP_PARSEERROR;
- }
- size_t cnt = 0;
- for_model_units (Model,U)
- if ( regexec( &RE, (*U)->label(), 0, 0, 0) == 0 ) {
- (*U) -> dump( true);
- cnt++;
- }
- if ( cnt )
- lprintf( 0, "------------\n%zd units total\n", cnt);
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_exec]) == 0 ) {
- return interpreter_run( operand, level+1, Options.interp_howstrict,
- true, true, varlist);
-
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_verbosity]) == 0 ) {
- if ( !operand )
- lprintf( 0, "verbosity level is %d", Options.verbosely);
- else if ( sscanf( operand, "%d", &Options.verbosely) < 1 ) {
- report_script_issue( fname, lineno, -1, "Bad value for `verbosity'");
- return CN_INTERP_PARSEERROR;
- }
- if ( Model )
- Model->verbosely = Options.verbosely;
- __cn_verbosely = Options.verbosely;
-
-
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_exit]) == 0 ) {
- delete Model;
- Model = nullptr;
- return CN_INTERP_EXIT;
-
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_show_vars]) == 0 ) {
- if ( !operand )
- operand = const_cast<char*>(".*");
- regex_t RE;
- if ( 0 != regcomp( &RE, operand, REG_EXTENDED | REG_NOSUB) ) {
- report_script_issue( fname, lineno, -1, "Invalid regexp for `show_vars' arg");
- return CN_INTERP_PARSEERROR;
- }
- size_t cnt = 0;
- size_t longest_id = 0;
- for ( auto& V : varlist )
- if ( regexec( &RE, V.name, 0, 0, 0) == 0 )
- if ( longest_id < strlen( V.name) )
- longest_id = strlen( V.name);
- for ( auto& V : varlist )
- if ( regexec( &RE, V.name, 0, 0, 0) == 0 ) {
- lprintf( 0, " %*s = %g", (int)longest_id, V.name, V.value);
- ++cnt;
- }
- if ( cnt > 1 )
- lprintf( 0, "---------- %zu variables\n", cnt);
-
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_clear_vars]) == 0 ) {
- if ( !operand )
- varlist.clear();
- else {
- regex_t RE;
- if ( 0 != regcomp( &RE, operand, REG_EXTENDED | REG_NOSUB) ) {
- report_script_issue( fname, lineno, -1, "Invalid regexp for `clear_vars' arg");
- return CN_INTERP_PARSEERROR;
- }
- for ( list<SVariable>::iterator V = varlist.begin(); V != varlist.end(); V++ )
- if ( regexec( &RE, V->name, 0, 0, 0) == 0 ) {
- varlist.erase( V);
- break;
- }
+cnrun::CInterpreterShell::TScriptExecResult
+cnrun::CInterpreterShell::
+exec_script( const string& script_fname)
+{
+ // 0. load script
+ string script_contents;
+ {
+ ifstream oleg (script_fname);
+ char b[8888];
+ while ( oleg.good() ) {
+ size_t n = oleg.readsome( b, sizeof(b) - 1); // leave room for the NUL at b[n]
+ if ( n == 0 )
+ break;
+ b[n] = 0;
+ script_contents += b;
+ }
+ if ( script_contents.size() == 0 ) {
+ vp( 0, "%s: empty file", script_fname.c_str());
+ return TScriptExecResult::file_error;
}
+ }
- regenerate_var_names = true;
-
-
- } else if ( strcmp( cmd, cnrun_cmd[CNCMD_pause]) == 0 ) {
- if ( operand ) {
- double s;
- if ( expr( operand, s, &varlist) )
- return CN_INTERP_PARSEERROR;
- if ( s < 0 ) {
- lprintf( 0, "Can't sleep backwards in time");
- return CN_INTERP_WARN;
- }
- printf( "(Paused for %u sec)", (unsigned int)s); fflush(stdin);
- sleep( rintf( s));
- printf( "\n");
- } else {
- printf( "Paused: press Enter ...");
- getchar();
- }
+ // 1a. prepare lua side
+ lua_settop( lua_state, 0);
+
+ // 1b. compile
+ int ret1 = luaL_loadbuffer(
+ lua_state,
+ script_contents.c_str(),
+ script_contents.size(),
+ script_fname.c_str());
+ if ( ret1 ) {
+ const char* errmsg = lua_tostring( lua_state, -1);
+ vp( 0, "%s: compilation failed: %s (%d)", script_fname.c_str(), errmsg, ret1);
+ return TScriptExecResult::compile_error;
+ }
- } else { // feed it to exprparser
- if ( expr( raw, result, &varlist) ) {
- report_script_issue( fname, lineno, -1, "%s", expr.status_s());
- return CN_INTERP_PARSEERROR;
- }
- if ( expr.toplevel_op != '=' )
- lprintf( 0, "%g", result);
+ // 1c. put host_fun on stack
+ if ( !lua_checkstack( lua_state, 2) ) {
+ vp( 0, "failed to grow stack for 2 elements");
+ return TScriptExecResult::stack_error;
+ }
- regenerate_var_names = true;
+ lua_pushlightuserdata( lua_state, this);
+ lua_pushcfunction( lua_state, host_fun);
+
+ // 1d. exec script
+ int call_result = lua_pcall(
+ lua_state,
+ 2, // nargsin
+ 1, // nargsout
+ 0);
+ if ( call_result ) {
+ vp( 0, "%s: script call failed (%d)", script_fname.c_str(), call_result);
+ return TScriptExecResult::call_error;
}
- return 0;
+ return TScriptExecResult::ok;
}
-} // inline namespace
-
-
-
+/*
+#define CNRUN_HISTORY ".cnrun-history"
int
-cnrun::CInterpreter::
-run( const string& script_fname, int level, int howstrict,
- TEnvImportOption env_import_option, TEnvExportOption env_export_option,
- list<SVariable> &varlist)
+cnrun::CInterpreterShell::
+exec_script( const string& script_fname)
{
int retval = 0;
- list<SVariable> our_variables;
- current_shell_variables = &our_variables;
-
- if ( env_import ) {
- our_variables.splice( our_variables.begin(), varlist);
-// printf( "imported %zu vars\n", our_variables.size());
- }
-
- if ( script_fname && strlen(script_fname) > 0 ) {
- ifstream script_stream( script_fname);
+ if ( script_fname != "-" ) {
+ ifstream script_stream( script_fname.c_str());
if ( !script_stream.good() ) {
- lprintf( -1, "Failed to open script file \"%s\"", script_fname);
+ vp( -1, "Failed to open script file \"%s\"", script_fname);
return -1;
}
- lprintf( 1, "execing %s\n", script_fname);
+ vp( 1, "execing %s\n", script_fname.c_str());
- size_t lineno = 0;
+ current_line = 0;
string buf;
while ( getline( script_stream, buf) || script_stream.gcount() ) {
- lineno++;
+ ++current_line;
if ( buf.size() ) {
char *pp = strchr( (char*)buf.c_str(), '#');
@@ -879,7 +316,7 @@ run( const string& script_fname, int level, int howstrict,
add_history( buf);
}
- char *pp = strchr( buf, '#');
+ char *pp = strchr( buf, '#');
if ( pp )
*pp = '\0';
size_t buflen = strlen( buf);
@@ -921,6 +358,7 @@ run( const string& script_fname, int level, int howstrict,
return retval;
}
+*/
// Local Variables:
// Mode: c++
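
As an aside for readers following the interpreter rewrite: below is a minimal, self-contained sketch (not part of this commit) of the calling convention exec_script() and host_fun() establish above. The compiled Lua chunk receives two arguments, an opaque shell pointer and a C dispatch function, and calls the latter as host(shell, opcode, args...); the dispatcher answers with an ok flag plus result values. All names here (toy_host, dummy_shell, the sample opcode) are invented for illustration.

extern "C" {
#include <lua.h>
#include <lualib.h>
#include <lauxlib.h>
}
#include <cstdio>

extern "C" int
toy_host( lua_State *L)
{
        void *shell = lua_touserdata( L, 1);   // would be the CInterpreterShell*
        const char *opcode = lua_tostring( L, 2);
        printf( "dispatching \"%s\" on shell %p with %d extra arg(s)\n",
                opcode, shell, lua_gettop( L) - 2);
        lua_pushboolean( L, 1);                // ok flag
        lua_pushinteger( L, 0);                // result code
        return 2;
}

int
main()
{
        lua_State *L = luaL_newstate();
        luaL_openlibs( L);

        // what a cnrun script might look like on the receiving end
        const char *script =
                "local shell, host = ...\n"
                "local ok, result = host( shell, 'new_model', 'M1')\n"
                "print( ok, result)\n";

        int dummy_shell;  // stands in for the CInterpreterShell instance
        if ( luaL_loadstring( L, script) == LUA_OK ) {
                lua_pushlightuserdata( L, &dummy_shell);
                lua_pushcfunction( L, toy_host);
                if ( lua_pcall( L, 2, 0, 0) != LUA_OK )
                        fprintf( stderr, "error: %s\n", lua_tostring( L, -1));
        }
        lua_close( L);
        return 0;
}

Built against liblua5.2 this prints the dispatched opcode and then "true 0" from the script's print, which is the same round trip cnrun scripts make through host_fun().
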
diff --git a/upstream/src/cnrun/main.cc b/upstream/src/cnrun/main.cc
index 79218b2..3ee0184 100644
--- a/upstream/src/cnrun/main.cc
+++ b/upstream/src/cnrun/main.cc
@@ -18,7 +18,7 @@
#include <string>
#include "libstilton/exprparser.hh"
-#include "runner.hh"
+#include "cnrun.hh"
#if HAVE_CONFIG_H && !defined(VERSION)
# include "config.h"
@@ -133,19 +133,19 @@ parse_opt( int key, const char *arg, struct argp_state *state)
switch ( key ) {
case opt::dt_max:
if ( Q.integration_dt_max = strtof( arg, &endp), *endp ) {
- fprintf( stderr, "Expected an FP value for dt-max arg, got \"%s\"", arg);
+ fprintf( stderr, "Expected a floating-point value for dt-max arg, got \"%s\"", arg);
return (error_t)ARGP_ERR_UNKNOWN;
}
break;
case opt::dt_min:
if ( Q.integration_dt_min = strtof( arg, &endp), *endp ) {
- fprintf( stderr, "Expected an FP value for dt-min arg, got \"%s\"", arg);
+ fprintf( stderr, "Expected a floating-point value for dt-min arg, got \"%s\"", arg);
return (error_t)ARGP_ERR_UNKNOWN;
}
break;
case opt::dt_xcap:
if ( Q.integration_dt_max_cap = strtof( arg, &endp), *endp ) {
- fprintf( stderr, "Expected an FP value for dt-xcap arg, got \"%s\"", arg);
+ fprintf( stderr, "Expected a floating-point value for dt-xcap arg, got \"%s\"", arg);
return (error_t)ARGP_ERR_UNKNOWN;
}
break;
@@ -163,7 +163,7 @@ parse_opt( int key, const char *arg, struct argp_state *state)
case opt::listen_dt:
if ( Q.listen_dt( arg, &endp), *endp ) {
- fprintf( stderr, "Expected an FP value for listen-dt arg, got \"%s\"", arg);
+ fprintf( stderr, "Expected a floating-point value for listen-dt arg, got \"%s\"", arg);
return (error_t)ARGP_ERR_UNKNOWN;
}
break;
@@ -324,8 +324,6 @@ main( int argc, char *argv[])
// } // namespace
-
-
// Local Variables:
// indent-tabs-mode: nil
// tab-width: 8
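
Aside: the option handlers above rely on the comma-operator form "if ( Q.x = strtof( arg, &endp), *endp )", i.e. parse first, then reject the argument if anything trails the number. A standalone sketch of the same check (parse_fp and the sample inputs are made up, not cnrun code):

#include <cstdio>
#include <cstdlib>

// true iff the whole string is a valid floating-point literal --
// the same test main.cc applies to dt-max/dt-min/dt-xcap/listen-dt
static bool
parse_fp( const char *arg, double &out)
{
        char *endp;
        out = strtod( arg, &endp);
        return endp != arg && *endp == '\0';
}

int
main()
{
        double v;
        printf( "\"0.25\"  -> %d\n", parse_fp( "0.25", v));   // 1: accepted
        printf( "\"1e-3x\" -> %d\n", parse_fp( "1e-3x", v));  // 0: trailing junk
        return 0;
}
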
diff --git a/upstream/src/cnrun/runner.hh b/upstream/src/cnrun/runner.hh
deleted file mode 100644
index 3b11e72..0000000
--- a/upstream/src/cnrun/runner.hh
+++ /dev/null
@@ -1,139 +0,0 @@
-/*
- * File name: cnrun/runner.hh
- * Project: cnrun
- * Author: Andrei Zavada <johnhommer at gmail.com>
- * Initial version: 2008-11-04
- *
- * Purpose: interpreter
- *
- * License: GPL
- */
-
-#ifndef CNRUN_CNRUN_RUNNER_H_
-#define CNRUN_CNRUN_RUNNER_H_
-
-#include <list>
-#include <string>
-
-#include "libstilton/exprparser.hh"
-
-#include "libcn/model.hh"
-
-#if HAVE_CONFIG_H && !defined(VERSION)
-# include "config.h"
-#endif
-
-namespace cnrun {
-
-struct SInterpOptions
- : public cnrun::SModelOptions {
- bool dump_params:1,
- list_units:1;
- string working_dir;
-
- list<string>
- scripts;
-
- SInterpOptions ()
- : dump_params (false),
- list_units (false),
- working_dir (".")
- {}
-};
-
-
-class CInterpreter {
-
- public:
- enum class TStrictness { strict, loose };
- CInterpreter ()
- : model (nullptr)
- {}
- ~CInterpreter ()
- {
- if ( model )
- delete model;
- }
-
- SInterpOptions
- options;
-
- enum class TEnvImportOption { yes, no };
- enum class TEnvExportOption { yes, no };
- int run( const string& script_fname, const SInterpOptions&,
- TEnvImportOption, TEnvExportOption);
- int new_model( const string& model_name, const string& fname, size_t lineno);
-
- private:
- cnrun::CModel
- *model;
-
- bool regenerate_unit_labels,
- regenerate_var_names,
- regenerate_source_ids;
-
- static list<cnrun::stilton::SVariable>
- *current_env;
-};
-
-
-
-
-extern const char* const cnrun_cmd[];
-
-
-enum class TInterpCommand {
- noop,
- new_model,
- load_nml,
- merge_nml,
- add_neuron,
- add_synapse,
- reset,
- reset_revert_params,
- reset_state_units,
- advance_until,
- advance,
- putout,
- decimate,
- start_listen,
- stop_listen,
- listen_dt,
- listen_mode,
- integration_dt_min,
- integration_dt_max,
- integration_dt_cap,
- start_log_spikes,
- stop_log_spikes,
- set_sxf_params,
- new_source,
- show_sources,
- connect_source,
- disconnect_source,
- set_parm_neuron,
- set_parm_synapse,
- cull_deaf_synapses,
- describe_model,
- show_units,
- exec,
- verbosity,
- show_vars,
- clear_vars,
- pause,
- exit
-};
-
-
-
-char** cnrun_completion( const char *text, int start, int end);
-
-} // namespace cnrun
-
-#endif
-
-// Local Variables:
-// Mode: c++
-// indent-tabs-mode: nil
-// tab-width: 8
-// c-basic-offset: 8
-// End:
diff --git a/upstream/src/libcn/Makefile.am b/upstream/src/libcn/Makefile.am
index 5691f66..be7b986 100644
--- a/upstream/src/libcn/Makefile.am
+++ b/upstream/src/libcn/Makefile.am
@@ -12,6 +12,7 @@ libcn_la_SOURCES = \
hosted-neurons.cc \
hosted-synapses.cc \
model-struct.cc \
+ model-tags.cc \
model-cycle.cc \
model-nmlio.cc \
sources.hh \
diff --git a/upstream/src/libcn/base-neuron.hh b/upstream/src/libcn/base-neuron.hh
index f0927b8..ffc69ad 100644
--- a/upstream/src/libcn/base-neuron.hh
+++ b/upstream/src/libcn/base-neuron.hh
@@ -1,17 +1,22 @@
/*
- * Author: Andrei Zavada <johnhommer at gmail.com>
- * building on original work by Thomas Nowotny <tnowotny at ucsd.edu>
+ * File name: libcn/base-neuron.hh
+ * Project: cnrun
+ * Author: Andrei Zavada <johnhommer at gmail.com>
+ * building on original work by Thomas Nowotny <tnowotny at ucsd.edu>
+ * Initial version: 2009-03-31
*
- * License: GPL-2+
- *
- * Initial version: 2009-03-31
+ * Purpose: neuron base class
*
+ * License: GPL
*/
-
#ifndef CNRUN_LIBCN_BASENEURON_H_
#define CNRUN_LIBCN_BASENEURON_H_
+#if HAVE_CONFIG_H && !defined(VERSION)
+# include "config.h"
+#endif
+
#include <list>
#include <cstring>
#include <cmath>
@@ -22,20 +27,14 @@
#include "base-unit.hh"
#include "base-synapse.hh"
-#if HAVE_CONFIG_H && !defined(VERSION)
-# include "config.h"
-#endif
-
using namespace std;
namespace cnrun {
-#define CN_MIN_DISTANCE .1
-
struct SSpikeloggerService;
-typedef map<C_BaseSynapse*, double> SCleft;
+using SCleft = map<C_BaseSynapse*, double>;
inline double operator+ ( double a, const pair<C_BaseSynapse*, double>& b) { return a + b.second; }
class C_BaseNeuron
@@ -74,24 +73,13 @@ class C_BaseNeuron
}
// distance
- double operator- ( const SCoord &p)
+ double operator- ( const SCoord &p) const
{
return sqrt( pow(x - p.x, 2) + pow(y - p.y, 2) + pow(z - p.z, 2));
}
-
- bool operator== ( const SCoord &p) const
- {
- return x == p.x && y == p.y && z == p.z;
- }
- bool operator!= ( const SCoord &p) const
- {
- return x != p.x || y != p.y || z != p.z;
- }
- bool too_close( const SCoord& p, double mindist = CN_MIN_DISTANCE) const
+ bool too_close( const SCoord& p, double mindist = .42 /* units? */) const
{
- return fabs(x - p.x) < mindist &&
- fabs(y - p.y) < mindist &&
- fabs(z - p.z) < mindist;
+ return operator-(p) < mindist;
}
};
@@ -162,7 +150,7 @@ class C_BaseNeuron
enable_spikelogging_service( double sample_period, double sigma, double from = 0.,
int s_mask = 0);
void disable_spikelogging_service();
- void sync_spikelogging_history();
+ void sync_spikelogging_history() const;
double distance_to( C_BaseNeuron*) const; // will do on demand
@@ -256,7 +244,7 @@ struct SSpikeloggerService {
}
protected:
- void sync_history();
+ void sync_history() const;
private:
int _status;
@@ -276,7 +264,7 @@ C_BaseNeuron::reset_state()
inline void
-C_BaseNeuron::sync_spikelogging_history()
+C_BaseNeuron::sync_spikelogging_history() const
{
if ( _spikelogger_agent )
_spikelogger_agent->sync_history();
diff --git a/upstream/src/libcn/base-synapse.hh b/upstream/src/libcn/base-synapse.hh
index 3ed2aab..ac98f99 100644
--- a/upstream/src/libcn/base-synapse.hh
+++ b/upstream/src/libcn/base-synapse.hh
@@ -1,17 +1,21 @@
/*
- * Author: Andrei Zavada <johnhommer at gmail.com>
- * building on original work by Thomas Nowotny <tnowotny at ucsd.edu>
+ * File name: libcn/base-synapse.hh
+ * Project: cnrun
+ * Author: Andrei Zavada <johnhommer at gmail.com>
+ * building on original work by Thomas Nowotny <tnowotny at ucsd.edu>
+ * Initial version: 2009-03-31
*
- * License: GPL-2+
+ * Purpose: synapse base class
*
- * Initial version: 2009-03-31
- *
- * Synapse units: alpha-beta
+ * License: GPL
*/
+#ifndef CNRUN_LIBCN_BASESYNAPSE_H_
+#define CNRUN_LIBCN_BASESYNAPSE_H_
-#ifndef CNRUN_LIBCN_BASE_SYNAPSE_H_
-#define CNRUN_LIBCN_BASE_SYNAPSE_H_
+#if HAVE_CONFIG_H && !defined(VERSION)
+# include "config.h"
+#endif
#include <cmath>
#include <vector>
@@ -23,10 +27,6 @@
#include "forward-decls.hh"
#include "base-unit.hh"
-#if HAVE_CONFIG_H && !defined(VERSION)
-# include "config.h"
-#endif
-
using namespace std;
diff --git a/upstream/src/libcn/base-unit.cc b/upstream/src/libcn/base-unit.cc
index 5922abd..b000f54 100644
--- a/upstream/src/libcn/base-unit.cc
+++ b/upstream/src/libcn/base-unit.cc
@@ -1,13 +1,18 @@
/*
- * Author: Andrei Zavada <johnhommer at gmail.com>
- * building on original work by Thomas Nowotny <tnowotny at ucsd.edu>
+ * File name: libcn/base-unit.cc
+ * Project: cnrun
+ * Author: Andrei Zavada <johnhommer at gmail.com>
+ * building on original work by Thomas Nowotny <tnowotny at ucsd.edu>
+ * Initial version: 2008-08-02
*
- * License: GPL-2+
- *
- * Initial version: 2008-08-02
+ * Purpose: unit base class
*
+ * License: GPL
*/
+#if HAVE_CONFIG_H && !defined(VERSION)
+# include "config.h"
+#endif
#include <fcntl.h>
#include <unistd.h>
@@ -21,17 +26,13 @@
#include "base-unit.hh"
#include "model.hh"
-#if HAVE_CONFIG_H && !defined(VERSION)
-# include "config.h"
-#endif
-
using namespace std;
using cnrun::alg::member;
cnrun::C_BaseUnit::
-C_BaseUnit( TUnitType intype, const string& inlabel,
+C_BaseUnit (TUnitType intype, const string& inlabel,
CModel* inM, int s_mask)
: precision (cn_default_unit_precision),
_type (intype), _status (0 |/* CN_UENABLED |*/ s_mask),
@@ -39,15 +40,16 @@ C_BaseUnit( TUnitType intype, const string& inlabel,
_binwrite_handle (-1), _listener_disk (nullptr), _listener_mem (nullptr)
{
memset( _label, 0, max_label_size);
- if ( inlabel.size() ) {
+ if ( inlabel.size() )
strncpy( _label, inlabel.c_str(), max_label_size);
- if ( inM && inM->unit_by_label( _label) ) {
- fprintf( stderr, "Model %s already has a unit labelled \"%s\"\n", inM->name.c_str(), _label);
- _status |= CN_UERROR;
- }
- } else
+ else
snprintf( _label, max_label_size-1, "fafa%p", this);
+ if ( inM && inM->unit_by_label( _label) ) {
+ fprintf( stderr, "Model %s already has a unit labelled \"%s\"\n", inM->name.c_str(), _label);
+ _status |= CN_UERROR;
+ }
+
reset_params();
// don't have field idx to do reset_vars() safely
}
@@ -120,7 +122,7 @@ start_listening( int mask)
if ( mask & CN_ULISTENING_DISK ) {
if ( M->is_diskless )
- fprintf( stderr, "Cannot get Unit \"%s\" to listen to disk in a diskless model\n", _label);
+ M->vp( 1, stderr, "Cannot get Unit \"%s\" to listen to disk in a diskless model\n", _label);
else {
_listener_disk = new ofstream( (string(_label)+".var").c_str(), ios_base::trunc);
_listener_disk->precision( precision);
@@ -167,7 +169,7 @@ stop_listening()
if ( _binwrite_handle != -1 )
if ( write( _binwrite_handle, _listener_mem->data(),
sizeof(double) * _listener_mem->size()) < 1 )
- fprintf( stderr, "write() failed on \"%s.varx\"\n", _label);
+ M->vp( 0, stderr, "write() failed on \"%s.varx\"\n", _label);
}
if ( _listener_mem ) {
@@ -206,7 +208,7 @@ tell()
if ( write( _binwrite_handle, &M->V[0], sizeof(double)) < 1 ||
write( _binwrite_handle, &var_value(0),
sizeof(double) * ((_status & CN_ULISTENING_1VARONLY) ? 1 : v_no())) < 1 )
- fprintf( stderr, "write() failed in tell() for \"%s\"\n", _label);
+ M->vp( 0, stderr, "write() failed in tell() for \"%s\"\n", _label);
}
if ( _listener_disk && !(_status & CN_ULISTENING_DEFERWRITE) ) {
@@ -254,12 +256,12 @@ dump( bool with_params, FILE *strm) const
fprintf( strm, "%s = %g; ", var_sym(v), get_var_value(v));
fprintf( strm, "\n");
- if ( sources.size() ) {
+ if ( _sources.size() ) {
fprintf( strm, " has sources: ");
- for ( auto &S : sources )
+ for ( auto &S : _sources )
fprintf( strm, "%s << %s; ",
(S.sink_type == SINK_PARAM) ? param_sym(S.idx) : var_sym(S.idx),
- S.source->name.c_str());
+ S.source->name());
fprintf( strm, "\n");
}
@@ -283,10 +285,10 @@ cnrun::C_BaseUnit::
detach_source( C_BaseSource *s, TSinkType sink_type, size_t idx)
{
// list <SSourceInterface<C_BaseSource>>::iterator K;
- // while ( (K = find( sources.begin(), sources.end(),
- // )) != sources.end() )
- // sources.erase( K);
- sources.remove( SSourceInterface<C_BaseSource> (s, sink_type, idx));
+ // while ( (K = find( _sources.begin(), _sources.end(),
+ // )) != _sources.end() )
+ // _sources.erase( K);
+ _sources.remove( SSourceInterface<C_BaseSource> (s, sink_type, idx));
M->unregister_unit_with_sources( this);
}
@@ -295,7 +297,7 @@ void
cnrun::C_BaseUnit::
apprise_from_sources()
{
- for ( auto &S : sources )
+ for ( auto &S : _sources )
switch ( S.sink_type ) {
case SINK_PARAM:
// printf( "apprise_from_sources() for %s{%d} = %g\n", _label, S->idx, (*S->source)( model_time()));
@@ -313,7 +315,7 @@ cnrun::C_BaseUnit::
~C_BaseUnit()
{
if ( M )
- M->vp( 5, stderr, " deleting base unit \"%s\"\n", _label);
+ M->vp( 5, " deleting base unit \"%s\"\n", _label);
if ( is_listening() ) {
stop_listening();
@@ -376,7 +378,7 @@ cnrun::C_BaseNeuron::
~C_BaseNeuron()
{
if ( M )
- M->vp( 4, stderr, " deleting base neuron \"%s\"\n", _label);
+ M->vp( 4, " deleting base neuron \"%s\"\n", _label);
// kill all efferents
for ( auto Y = _axonal_harbour.rbegin(); Y != _axonal_harbour.rend(); ++Y ) {
@@ -471,7 +473,7 @@ get_sxf_vector_custom( vector<double> *sdf_buffer, vector<double> *shf_buffer,
nspikes_buffer->clear();
for ( double t = from; t <= to; t += sample_period_custom ) {
- size_t nspikes = 0;
+ size_t nspikes = 0;
double sdf_value = sdf(
t, sample_period_custom,
sigma_custom, &nspikes);
@@ -489,7 +491,7 @@ get_sxf_vector_custom( vector<double> *sdf_buffer, vector<double> *shf_buffer,
void
cnrun::SSpikeloggerService::
-sync_history()
+sync_history() const
{
if ( !_client->M || (_client->M && _client->M->is_diskless) )
return;
@@ -563,10 +565,10 @@ clone_to_target( C_BaseNeuron *tgt, double g)
{
// check if we have no existing connection already to tgt
if ( member( tgt, _targets) ) {
- fprintf( stderr, "Neuron \"%s\" already synapsing onto \"%s\"\n",
- _source->_label, tgt->_label);
- return nullptr;
- }
+ M->vp( 1, stderr, "Neuron \"%s\" already synapsing onto \"%s\"\n",
+ _source->_label, tgt->_label);
+ return nullptr;
+ }
tgt -> _dendrites[this] = g;
_targets.push_back( tgt);
@@ -590,12 +592,12 @@ make_clone_independent( C_BaseNeuron *tgt)
if ( M )
M->vp( 4, "promoting a clone of %s synapse from \"%s\" to \"%s\"\n",
species(), _label, tgt->_label);
- if ( member( tgt, _targets ) )
- fprintf( stderr, "ебать!\n");
+ // if ( unlikely (member( tgt, _targets)) )
+ // fprintf( stderr, "ебать!\n");
_targets.remove( tgt);
- if ( member( this, tgt->_dendrites ) )
- fprintf( stderr, "ебать-колотить!\n");
+ // if ( unlikely (member( this, tgt->_dendrites)) )
+ // fprintf( stderr, "ебать-колотить!\n");
tgt -> _dendrites.erase( this);
snprintf( _label, max_label_size-1, "%s:%zu", _source->_label, _targets.size());
diff --git a/upstream/src/libcn/base-unit.hh b/upstream/src/libcn/base-unit.hh
index 9710cf0..fff96a3 100644
--- a/upstream/src/libcn/base-unit.hh
+++ b/upstream/src/libcn/base-unit.hh
@@ -1,17 +1,22 @@
/*
- * Author: Andrei Zavada <johnhommer at gmail.com>
- * building on original work by Thomas Nowotny <tnowotny at ucsd.edu>
+ * File name: libcn/base-unit.hh
+ * Project: cnrun
+ * Author: Andrei Zavada <johnhommer at gmail.com>
+ * building on original work by Thomas Nowotny <tnowotny at ucsd.edu>
+ * Initial version: 2008-08-02
*
- * License: GPL-2+
- *
- * Initial version: 2008-08-02
+ * Purpose: unit base class
*
+ * License: GPL
*/
-
#ifndef CNRUN_LIBCN_BASEUNIT_H_
#define CNRUN_LIBCN_BASEUNIT_H_
+#if HAVE_CONFIG_H && !defined(VERSION)
+# include "config.h"
+#endif
+
#include <fstream>
#include <cstring>
#include <vector>
@@ -23,10 +28,6 @@
#include "types.hh"
#include "sources.hh"
-#if HAVE_CONFIG_H && !defined(VERSION)
-# include "config.h"
-#endif
-
using namespace std;
using cnrun::stilton::str::sasprintf;
@@ -107,7 +108,7 @@ class C_BaseUnit {
// parent model
const CModel&
parent_model() const { return *M; }
- const double
+ double
model_time() const; // defined in model.h
bool is_owned() const { return _status & CN_UOWNED; }
@@ -138,11 +139,11 @@ class C_BaseUnit {
}
bool has_sources() const __attribute__ ((pure))
{
- return not sources.empty();
+ return not _sources.empty();
}
bool has_same_sources( const C_BaseUnit &rv) const __attribute__ ((pure))
{
- return sources == rv.sources;
+ return _sources == rv._sources;
// not sure taking the order of otherwise identical sources should matter
}
bool is_identical( const C_BaseUnit &rv) const __attribute__ ((pure))
@@ -161,7 +162,7 @@ class C_BaseUnit {
get_param_value( const string& sym) const
{
int id = param_idx_by_sym( sym);
- if ( id == -1 )
+ if ( unlikely (id == -1) )
throw sasprintf( "Bad parameter name \"%s\" for unit \"%s\"", sym.c_str(), _label);
return P[id];
}
@@ -176,8 +177,9 @@ class C_BaseUnit {
param_value( const string& sym)
{
int id = param_idx_by_sym( sym);
- if ( id == -1 )
- throw sasprintf( "Bad parameter name \"%s\" for unit \"%s\"", sym.c_str(), _label);
+ if ( unlikely (id == -1) )
+ throw sasprintf( "Bad parameter name \"%s\" for unit \"%s\"",
+ sym.c_str(), _label);
return P[id];
}
@@ -267,7 +269,7 @@ class C_BaseUnit {
vector<double> P;
list<SSourceInterface<C_BaseSource>>
- sources;
+ _sources;
private:
// where vars are written by tell()
diff --git a/upstream/src/libcn/forward-decls.hh b/upstream/src/libcn/forward-decls.hh
index 3e20242..7c038a7 100644
--- a/upstream/src/libcn/forward-decls.hh
+++ b/upstream/src/libcn/forward-decls.hh
@@ -1,17 +1,16 @@
/*
- * Author: Andrei Zavada <johnhommer at gmail.com>
- * building on original work by Thomas Nowotny <tnowotny at ucsd.edu>
+ * File name: libcn/forward-decls.hh
+ * Project: cnrun
+ * Author: Andrei Zavada <johnhommer at gmail.com>
+ * Initial version: 2014-09-16
*
- * License: GPL-2+
+ * Purpose: forward declarations
*
- * Initial version: 2014-09-16
- *
- * Purpose: forward declarations
+ * License: GPL
*/
-
-#ifndef CNRUN_LIBCN_FORWARD_DECLS_H
-#define CNRUN_LIBCN_FORWARD_DECLS_H
+#ifndef CNRUN_LIBCN_FORWARDDECLS_H_
+#define CNRUN_LIBCN_FORWARDDECLS_H_
namespace cnrun {
diff --git a/upstream/src/libcn/hosted-attr.hh b/upstream/src/libcn/hosted-attr.hh
index d971578..5eca9c0 100644
--- a/upstream/src/libcn/hosted-attr.hh
+++ b/upstream/src/libcn/hosted-attr.hh
@@ -1,23 +1,25 @@
/*
- * Author: Andrei Zavada <johnhommer at gmail.com>
- * building on original work by Thomas Nowotny <tnowotny at ucsd.edu>
+ * File name: libcn/hosted-attr.hh
+ * Project: cnrun
+ * Author: Andrei Zavada <johnhommer at gmail.com>
+ * building on original work by Thomas Nowotny <tnowotny at ucsd.edu>
+ * Initial version: 2009-03-31
*
- * License: GPL-2+
- *
- * Initial version: 2009-03-31
+ * Purpose: Interface class containing hosted unit attributes.
*
+ * License: GPL
*/
-
-#ifndef LIBCN_HOSTED_ATTR_H
-#define LIBCN_HOSTED_ATTR_H
-
-#include <vector>
+#ifndef CNRUN_LIBCN_HOSTEDATTR_H_
+#define CNRUN_LIBCN_HOSTEDATTR_H_
#if HAVE_CONFIG_H && !defined(VERSION)
# include "config.h"
#endif
+#include "libstilton/lang.hh"
+#include <vector>
+
using namespace std;
@@ -25,28 +27,30 @@ namespace cnrun {
class C_HostedAttributes {
- friend class CIntegrateRK65;
+ friend class CIntegrateRK65;
+ friend class CModel;
protected:
- C_HostedAttributes()
- {}
-
- friend class CModel;
// variables for units in the model are catenated on a single
// vector<double>, as an essential optimization measure; each
// unit knows its own set of variables by this idx:
- size_t idx;
+ size_t idx;
// the containing model provides idx on registering our unit
public:
- virtual void reset_vars() = 0;
- virtual double &var_value( size_t) = 0;
+ virtual void reset_vars() = 0;
+ virtual double &var_value( size_t) = 0;
- virtual void derivative( vector<double>&, vector<double>&) = 0;
+ virtual void derivative( vector<double>&, vector<double>&) = 0;
};
}
#endif
-// EOF
+// Local Variables:
+// Mode: c++
+// indent-tabs-mode: nil
+// tab-width: 8
+// c-basic-offset: 8
+// End:
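
The comment kept in C_HostedAttributes above is the key to the "hosted" unit design used throughout libcn: the parent model holds every hosted unit's state variables catenated on one vector<double>, and each unit only remembers its idx into that vector. A toy sketch of the layout (class and member names are invented here, not cnrun's API):

#include <cstdio>
#include <cstddef>
#include <vector>

struct ToyModel {
        std::vector<double> V;            // all units' state, catenated
        size_t host_unit( size_t n_vars)  // hand out the new unit's idx
        {
                size_t idx = V.size();
                V.resize( V.size() + n_vars, 0.);
                return idx;
        }
};

struct ToyHostedUnit {
        ToyModel *M;
        size_t idx, n_vars;
        double& var_value( size_t v) { return M->V[idx + v]; }
};

int
main()
{
        ToyModel M;
        ToyHostedUnit a {&M, M.host_unit( 2), 2};  // say, V and n
        ToyHostedUnit b {&M, M.host_unit( 1), 1};  // say, g
        a.var_value( 0) = -60.;  // lands directly in M.V, where the integrator sees it
        b.var_value( 0) = .1;
        printf( "M.V = [%g %g %g]\n", M.V[0], M.V[1], M.V[2]);
        return 0;
}

This is what lets the integrator sweep one flat vector instead of visiting units one by one; the real interface adds derivative() and reset_vars() on top of the same idea.
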
diff --git a/upstream/src/libcn/hosted-neurons.cc b/upstream/src/libcn/hosted-neurons.cc
index d26ab35..bacf4c3 100644
--- a/upstream/src/libcn/hosted-neurons.cc
+++ b/upstream/src/libcn/hosted-neurons.cc
@@ -1,13 +1,19 @@
/*
- * Author: Andrei Zavada <johnhommer at gmail.com>
- * building on original work by Thomas Nowotny <tnowotny at ucsd.edu>
+ * File name: libcn/hosted-neurons.cc
+ * Project: cnrun
+ * Author: Andrei Zavada <johnhommer at gmail.com>
+ * building on original work by Thomas Nowotny <tnowotny at ucsd.edu>
+ * Initial version: 2008-10-16
*
- * License: GPL-2+
- *
- * Initial version: 2008-10-16
+ * Purpose: hosted neuron classes (those having their
+ * state vars on parent model's integration vectors)
*
+ * License: GPL
*/
+#if HAVE_CONFIG_H && !defined(VERSION)
+# include "config.h"
+#endif
#include <cmath>
#include <iostream>
@@ -18,10 +24,6 @@
#include "types.hh"
#include "model.hh"
-#if HAVE_CONFIG_H && !defined(VERSION)
-# include "config.h"
-#endif
-
cnrun::C_HostedNeuron::
C_HostedNeuron (TUnitType intype, const string& inlabel,
@@ -755,8 +757,6 @@ const double cnrun::__CN_Vars_OscillatorVdPol[] = {
};
-
-
//#endif // CN_WANT_MORE_NEURONS
// Local Variables:
diff --git a/upstream/src/libcn/hosted-neurons.hh b/upstream/src/libcn/hosted-neurons.hh
index f71955c..37029cc 100644
--- a/upstream/src/libcn/hosted-neurons.hh
+++ b/upstream/src/libcn/hosted-neurons.hh
@@ -1,28 +1,29 @@
/*
- * Author: Andrei Zavada <johnhommer at gmail.com>
- * building on original work by Thomas Nowotny <tnowotny at ucsd.edu>
+ * File name: libcn/hosted-neurons.hh
+ * Project: cnrun
+ * Author: Andrei Zavada <johnhommer at gmail.com>
+ * building on original work by Thomas Nowotny <tnowotny at ucsd.edu>
+ * Initial version: 2008-10-11
*
- * License: GPL-2+
- *
- * Initial version: 2008-10-11
+ * Purpose: hosted neuron classes (those having their
+ * state vars on parent model's integration vectors)
*
+ * License: GPL
*/
-
-
#ifndef CNRUN_LIBCN_HOSTEDNEURONS_H_
#define CNRUN_LIBCN_HOSTEDNEURONS_H_
-#include "gsl/gsl_math.h"
+#if HAVE_CONFIG_H && !defined(VERSION)
+# include "config.h"
+#endif
+
+#include <gsl/gsl_math.h>
#include "forward-decls.hh"
#include "base-neuron.hh"
#include "hosted-attr.hh"
-#if HAVE_CONFIG_H && !defined(VERSION)
-# include "config.h"
-#endif
-
namespace cnrun {
enum class TIncludeOption { is_last, is_notlast, };
diff --git a/upstream/src/libcn/hosted-synapses.cc b/upstream/src/libcn/hosted-synapses.cc
index 121ec6b..ac42ed2 100644
--- a/upstream/src/libcn/hosted-synapses.cc
+++ b/upstream/src/libcn/hosted-synapses.cc
@@ -1,13 +1,19 @@
/*
- * Author: Andrei Zavada <johnhommer at gmail.com>
- * building on original work by Thomas Nowotny <tnowotny at ucsd.edu>
+ * File name: libcn/hosted-synapses.cc
+ * Project: cnrun
+ * Author: Andrei Zavada <johnhommer at gmail.com>
+ * building on original work by Thomas Nowotny <tnowotny at ucsd.edu>
+ * Initial version: 2009-04-03
*
- * License: GPL-2+
- *
- * Initial version: 2009-04-03
+ * Purpose: hosted synapse classes (those having their
+ * state vars on parent model's integration vectors)
*
+ * License: GPL
*/
+#if HAVE_CONFIG_H && !defined(VERSION)
+# include "config.h"
+#endif
#include <iostream>
@@ -17,20 +23,16 @@
#include "types.hh"
#include "model.hh"
-#if HAVE_CONFIG_H && !defined(VERSION)
-# include "config.h"
-#endif
-
using namespace std;
// the base synapse here
cnrun::C_HostedSynapse::
-C_HostedSynapse( TUnitType intype,
+C_HostedSynapse (TUnitType intype,
C_BaseNeuron *insource, C_BaseNeuron *intarget,
double ing, CModel *inM, int s_mask,
TIncludeOption include_option)
- : C_BaseSynapse( intype, insource, intarget, ing, inM, s_mask),
+ : C_BaseSynapse (intype, insource, intarget, ing, inM, s_mask),
C_HostedAttributes()
{
if ( M )
diff --git a/upstream/src/libcn/hosted-synapses.hh b/upstream/src/libcn/hosted-synapses.hh
index 075d321..25d8fee 100644
--- a/upstream/src/libcn/hosted-synapses.hh
+++ b/upstream/src/libcn/hosted-synapses.hh
@@ -1,18 +1,23 @@
/*
- * Author: Andrei Zavada <johnhommer at gmail.com>
- * building on original work by Thomas Nowotny <tnowotny at ucsd.edu>
+ * File name: libcn/hosted-synapses.hh
+ * Project: cnrun
+ * Author: Andrei Zavada <johnhommer at gmail.com>
+ * building on original work by Thomas Nowotny <tnowotny at ucsd.edu>
+ * Initial version: 2009-04-01
*
- * License: GPL-2+
+ * Purpose: hosted synapse classes (those having their
+ * state vars on parent model's integration vectors)
*
- * Initial version: 2009-04-01
- *
- * Synapse units: alpha-beta
+ * License: GPL
*/
-
#ifndef CNRUN_LIBCN_HOSTEDSYNAPSES_H_
#define CNRUN_LIBCN_HOSTEDSYNAPSES_H_
+#if HAVE_CONFIG_H && !defined(VERSION)
+# include "config.h"
+#endif
+
#include <vector>
#include <queue>
#include <cfloat>
@@ -23,10 +28,6 @@
#include "hosted-neurons.hh"
#include "standalone-neurons.hh"
-#if HAVE_CONFIG_H && !defined(VERSION)
-# include "config.h"
-#endif
-
using namespace std;
@@ -307,7 +308,6 @@ class CSynapseRall_rd;
class CSynapseRall_dr;
class CSynapseRall_rr;
-
}
#endif
diff --git a/upstream/src/libcn/integrate-base.hh b/upstream/src/libcn/integrate-base.hh
index e48522b..2481a27 100644
--- a/upstream/src/libcn/integrate-base.hh
+++ b/upstream/src/libcn/integrate-base.hh
@@ -1,24 +1,25 @@
/*
- * Author: Andrei Zavada <johnhommer at gmail.com>
- * building on original work by Thomas Nowotny
+ * File name: libcn/integrate-base.hh
+ * Project: cnrun
+ * Author: Andrei Zavada <johnhommer at gmail.com>
+ * building on original work by Thomas Nowotny
+ * Initial version: 2008-09-23
*
- * License: GPL-2+
+ * Purpose: base class for integrators, to be plugged into CModel.
*
- * Initial version: 2008-09-23
- *
- * A base class for integrators, to be plugged into CModel
+ * License: GPL
*/
#ifndef CNRUN_LIBCN_INTEGRATE_BASE_H_
#define CNRUN_LIBCN_INTEGRATE_BASE_H_
-#include "libstilton/lang.hh"
-#include "forward-decls.hh"
-
#if HAVE_CONFIG_H && !defined(VERSION)
# include "config.h"
#endif
+#include "libstilton/lang.hh"
+#include "forward-decls.hh"
+
namespace cnrun {
@@ -27,7 +28,7 @@ class CIntegrate_base {
DELETE_DEFAULT_METHODS (CIntegrate_base)
public:
- double _dt_min, _dt_max,
+ double _dt_min, _dt_max, _dt_cap,
_eps, _eps_abs, _eps_rel,
dt; // that which is current
@@ -35,10 +36,10 @@ class CIntegrate_base {
CModel *model;
- CIntegrate_base (double dt_min, double dt_max,
- double eps, double eps_abs, double eps_rel,
+ CIntegrate_base (const double& dt_min, const double& dt_max, const double& dt_cap,
+ const double& eps, const double& eps_abs, const double& eps_rel,
bool inis_owned)
- : _dt_min (dt_min), _dt_max (dt_max),
+ : _dt_min (dt_min), _dt_max (dt_max), _dt_cap (dt_cap),
_eps (eps), _eps_abs (eps_abs), _eps_rel (eps_rel),
dt (dt_min),
is_owned (inis_owned)
diff --git a/upstream/src/libcn/integrate-rk65.hh b/upstream/src/libcn/integrate-rk65.hh
index 8f84941..874b757 100644
--- a/upstream/src/libcn/integrate-rk65.hh
+++ b/upstream/src/libcn/integrate-rk65.hh
@@ -1,26 +1,28 @@
/*
- * Author: Andrei Zavada <johnhommer at gmail.com>
- * building on original work by Thomas Nowotny
+ * File name: libcn/integrate-rk65.hh
+ * Project: cnrun
+ * Author: Andrei Zavada <johnhommer at gmail.com>
+ * building on original work by Thomas Nowotny
+ * Initial version: 2008-09-23
*
- * License: GPL-2+
+ * Purpose: A Runge-Kutta 6-5 integrator.
*
- * Initial version: 2008-09-23
- *
- * A Runge-Kutta 6-5 integrator
+ * License: GPL
*/
-
#ifndef CNRUN_LIBCN_INTEGRATERK65_H_
#define CNRUN_LIBCN_INTEGRATERK65_H_
+#if HAVE_CONFIG_H && !defined(VERSION)
+# include "config.h"
+#endif
+
#include <vector>
#include "libstilton/lang.hh"
#include "forward-decls.hh"
#include "integrate-base.hh"
-#if HAVE_CONFIG_H && !defined(VERSION)
-# include "config.h"
-#endif
+using namespace std;
namespace cnrun {
@@ -30,13 +32,11 @@ class CIntegrateRK65
DELETE_DEFAULT_METHODS (CIntegrateRK65)
public:
- double _dt_max_cap;
-
- CIntegrateRK65( double dt_min = 1e-6, double dt_max = .5, double dt_max_cap = 5,
- double eps = 1e-8, double eps_abs = 1e-12, double eps_rel = 1e-6,
- bool inis_owned = true)
- : CIntegrate_base (dt_min, dt_max, eps, eps_abs, eps_rel, is_owned),
- _dt_max_cap (dt_max_cap)
+ CIntegrateRK65 (double dt_min_ = 1e-6, double dt_max_ = .5, double dt_cap_ = 5,
+ double eps_ = 1e-8, double eps_abs_ = 1e-12, double eps_rel_ = 1e-6,
+ bool is_owned_ = true)
+ : CIntegrate_base (dt_min_, dt_max_, dt_cap_,
+ eps_, eps_abs_, eps_rel_, is_owned_)
{}
void cycle() __attribute__ ((hot));
@@ -44,7 +44,7 @@ class CIntegrateRK65
void prepare();
private:
- std::vector<double> Y[9], F[9], y5;
+ vector<double> Y[9], F[9], y5;
};
}
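For context, with _dt_max_cap now folded into the base class as _dt_cap, constructing the integrator reads as in the following minimal sketch (it also sidesteps the old initializer, which passed the member is_owned rather than the inis_owned argument); the CModel that ends up owning the instance is assumed and not shown:

        #include "integrate-rk65.hh"

        // Sketch only: arguments mirror the new constructor defaults above.
        cnrun::CIntegrateRK65 *integrator =
                new cnrun::CIntegrateRK65(
                        1e-6,               // dt_min
                        .5,                 // dt_max
                        5.,                 // dt_cap (formerly CIntegrateRK65::_dt_max_cap)
                        1e-8, 1e-12, 1e-6,  // eps, eps_abs, eps_rel
                        true);              // is_owned: the owning CModel will delete it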
diff --git a/upstream/src/libcn/model-cycle.cc b/upstream/src/libcn/model-cycle.cc
index dc76682..50c9de6 100644
--- a/upstream/src/libcn/model-cycle.cc
+++ b/upstream/src/libcn/model-cycle.cc
@@ -1,29 +1,27 @@
/*
- * Author: Andrei Zavada <johnhommer at gmail.com>
- * building on original work by Thomas Nowotny <tnowotny at ucsd.edu>
+ * File name: libcn/model-cycle.cc
+ * Project: cnrun
+ * Author: Andrei Zavada <johnhommer at gmail.com>
+ * Initial version: 2008-08-02
*
- * License: GPL-2+
+ * Purpose: CModel top cycle
*
- * Initial version: 2008-08-02
- *
- * CModel top cycle
+ * License: GPL
*/
+#if HAVE_CONFIG_H && !defined(VERSION)
+# include "config.h"
+#endif
#include <csignal>
-#include <iostream>
#include <ctime>
#include <cstdlib>
+#include <iostream>
#include "libstilton/lang.hh"
-
#include "integrate-rk65.hh"
#include "model.hh"
-#if HAVE_CONFIG_H && !defined(VERSION)
-# include "config.h"
-#endif
-
using namespace std;
@@ -133,7 +131,7 @@ cycle()
}
// kinkiness in synapses causes dt to rocket
- double dtx = min( _dt_max, dt * _dt_max_cap);
+ double dtx = min( _dt_max, dt * _dt_cap);
// determine minimal necessary new dt to get error < eps based on the
// difference between results in y5 and W
@@ -141,8 +139,8 @@ cycle()
// exclude time (at index 0)
//#pragma omp parallel for private(try_eps,delta,try_dtj)
for ( k = 1; k < model->_var_cnt; ++k ) {
- try_eps = max (_eps_abs, min (_eps, abs(_eps_rel * model->W[k])));
- delta = abs (model->W[k] - y5[k]);
+ try_eps = max( _eps_abs, min (_eps, abs(_eps_rel * model->W[k])));
+ delta = abs( model->W[k] - y5[k]);
if ( delta > DBL_EPSILON * y5[k] ) {
try_dt = exp( (log(try_eps) - log(delta)) / 6) * dt;
if ( try_dt < dtx )
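The error control above is the usual embedded-pair rule: with a 6(5) scheme the local error grows roughly as dt^6, so the candidate step is dt * (eps/delta)^(1/6), kept below the capped maximum dtx. A self-contained sketch of that per-component proposal, with names chosen here for illustration rather than taken from the patch:

        #include <algorithm>
        #include <cmath>

        // Propose the next step from the current dt, the tolerated error eps and
        // the observed component error delta; never exceed the capped maximum.
        static double propose_dt( double dt, double eps, double delta, double dt_capped_max)
        {
                double try_dt = std::exp( (std::log(eps) - std::log(delta)) / 6) * dt;
                return std::min( try_dt, dt_capped_max);
        }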
@@ -185,7 +183,7 @@ advance( const double dist, double * const cpu_time_used_p)
signal( SIGINT, ctrl_c_handler);
if ( units.size() == 0 ) {
- fprintf( stderr, "Model is empty\n");
+ vp( 1, "Model is empty\n");
return 0;
}
if ( is_ready )
@@ -216,9 +214,9 @@ _setup_schedulers()
regular_periods_last_checked.clear();
if ( units_with_periodic_sources.size() ) { // determine period(s) at which to wake up reader update loop
for ( auto& U : units_with_periodic_sources )
- for ( auto& S : U -> sources )
+ for ( auto& S : U -> _sources )
regular_periods.push_back(
- (reinterpret_cast<CSourcePeriodic*>(S.source)) -> period);
+ (reinterpret_cast<CSourcePeriodic*>(S.source)) -> period());
regular_periods.unique();
regular_periods.sort();
regular_periods_last_checked.resize( regular_periods.size());
@@ -271,7 +269,7 @@ prepare_advance()
auto I = regular_periods.begin(); \
auto Ic = regular_periods_last_checked.begin(); \
for ( ; I != regular_periods.end(); ++I, ++Ic ) \
- if ( unlikely(model_time() >= *I * (*Ic + 1)) ) { \
+ if ( unlikely (model_time() >= *I * (*Ic + 1)) ) { \
(*Ic)++; \
make_units_with_periodic_sources_apprise_from_sources(); \
} \
@@ -335,6 +333,7 @@ prepare_advance()
#define _DO_ADVANCE_COMMON_EPILOG \
+ make_spikeloggers_sync_history(); \
cpu_time_ended = clock(); \
double cpu_time_taken_seconds = ((double) (cpu_time_ended - cpu_time_started)) / CLOCKS_PER_SEC; \
if ( cpu_time_used_p ) \
diff --git a/upstream/src/libcn/model-nmlio.cc b/upstream/src/libcn/model-nmlio.cc
index 2868194..7c6dddf 100644
--- a/upstream/src/libcn/model-nmlio.cc
+++ b/upstream/src/libcn/model-nmlio.cc
@@ -1,13 +1,18 @@
/*
- * Author: Andrei Zavada <johnhommer at gmail.com>
+ * File name: libcn/model-nmlio.cc
+ * Project: cnrun
+ * Author: Andrei Zavada <johnhommer at gmail.com>
+ * Initial version: 2008-09-02
*
- * License: GPL-2+
+ * Purpose: NeuroML import/export.
*
- * Initial version: 2008-09-02
- *
- * Purpose: NeuroML import/export methods for CModel
+ * License: GPL
*/
+#if HAVE_CONFIG_H && !defined(VERSION)
+# include "config.h"
+#endif
+
#include <string>
#include <iostream>
#include <regex.h>
@@ -15,10 +20,6 @@
#include "forward-decls.hh"
#include "model.hh"
-#if HAVE_CONFIG_H && !defined(VERSION)
-# include "config.h"
-#endif
-
using namespace std;
@@ -87,7 +88,7 @@ import_NetworkML( xmlDoc *doc, const string& fname,
// read meta:notes and make out a name for the model
if ( !root_node ) {
- fprintf( stderr, "Failed to obtain root element\n");
+ vp( 0, stderr, "import_NetworkML(\"%s\"): No root element\n", fname.c_str());
retval = TNMLIOResult::noelem;
goto out;
}
@@ -114,7 +115,7 @@ import_NetworkML( xmlDoc *doc, const string& fname,
name = "(unnamed)";
}
- vp( 0, "Model \"%s\": %s topology from %s\n",
+ vp( 1, "Model \"%s\": %s topology from %s\n",
name.c_str(),
(import_option == TNMLImportOption::merge) ?"Merging" :"Importing",
fname.c_str());
@@ -138,16 +139,13 @@ import_NetworkML( xmlDoc *doc, const string& fname,
if ( (retval = _process_projections( n->children)) < 0 )
goto out;
} else
- if ( options.verbosely > 2 )
- cout << "No projections found\n";
+ vp( 2, "No projections found\n");
out:
// we are done with topology; now put units' variables on a vector
finalize_additions();
// can call time_step only after finalize_additions
- cout << endl;
-
return retval;
}
@@ -174,7 +172,7 @@ _process_populations( xmlNode *n)
// with a catch that libxml functions
// expect strings pointed to to be good UTF
if ( !group_id_s ) {
- fprintf( stderr, "<population> element missing a \"name\" attribute near line %d\n", n->line);
+ vp( 0, stderr, "<population> element missing a \"name\" attribute near line %d\n", n->line);
return TNMLIOResult::badattr;
}
// probably having an unnamed popuation isn't an error so serious as to abort the
@@ -187,8 +185,8 @@ _process_populations( xmlNode *n)
// but well, let's check if we have units of that species in stock
if ( !unit_species_is_neuron((char*)cell_type_s) && !unit_family_is_neuron((char*)cell_type_s) ) {
- fprintf( stderr, "Bad cell species or family (\"%s\") in population \"%s\"\n",
- (char*)cell_type_s, group_id_s);
+ vp( 0, stderr, "Bad cell species or family (\"%s\") in population \"%s\"\n",
+ (char*)cell_type_s, group_id_s);
throw TNMLIOResult::badcelltype;
}
diff --git a/upstream/src/libcn/model-struct.cc b/upstream/src/libcn/model-struct.cc
index 14ee703..2fe4ff0 100644
--- a/upstream/src/libcn/model-struct.cc
+++ b/upstream/src/libcn/model-struct.cc
@@ -1,14 +1,19 @@
/*
- * Author: Andrei Zavada <johnhommer at gmail.com>
- * building on original work by Thomas Nowotny
+ * File name: libcn/model-struct.cc
+ * Project: cnrun
+ * Author: Andrei Zavada <johnhommer at gmail.com>
+ * building on original work by Thomas Nowotny
+ * Initial version: 2008-09-02
*
- * License: GPL-2+
+ * Purpose: CModel household.
*
- * Initial version: 2008-09-02
- *
- * CModel household
+ * License: GPL
*/
+#if HAVE_CONFIG_H && !defined(VERSION)
+# include "config.h"
+#endif
+
#include <sys/time.h>
#include <csignal>
#include <iostream>
@@ -16,23 +21,18 @@
#include <algorithm>
#include <functional>
-#include <regex.h>
-
#include "libstilton/string.hh"
-
#include "model.hh"
-#if HAVE_CONFIG_H && !defined(VERSION)
-# include "config.h"
-#endif
-
using namespace std;
using namespace cnrun::stilton::str;
cnrun::CModel::
-CModel( const string& inname, CIntegrate_base *inintegrator, const SModelOptions& inoptions)
+CModel (const string& inname,
+ CIntegrate_base *inintegrator,
+ const SModelOptions& inoptions)
: name (inname),
options (inoptions),
_global_unit_id_reservoir (0l),
@@ -86,9 +86,9 @@ cnrun::CModel::
delete _dt_logger;
delete _spike_logger;
- while ( sources.size() ) {
- delete sources.back();
- sources.pop_back();
+ while ( _sources.size() ) {
+ delete _sources.back();
+ _sources.pop_back();
}
gsl_rng_free( _rng);
@@ -111,7 +111,8 @@ reset( TResetOption option)
regular_periods.clear();
regular_periods_last_checked.clear();
- // this will cause scheduler_update_periods_* to be recomputed by prepare_advance()
+ // this will cause scheduler_update_periods_* to be recomputed by prepare_advance()
+
is_ready = false;
if ( options.log_dt ) {
@@ -126,11 +127,6 @@ reset( TResetOption option)
-
-
-
-
-
cnrun::C_BaseUnit*
cnrun::CModel::
unit_by_label( const string& label) const
@@ -141,6 +137,7 @@ unit_by_label( const string& label) const
return nullptr;
}
+
cnrun::C_BaseNeuron*
cnrun::CModel::
neuron_by_label( const string& label) const
@@ -151,6 +148,7 @@ neuron_by_label( const string& label) const
return nullptr;
}
+
cnrun::C_BaseSynapse*
cnrun::CModel::
synapse_by_label( const string& label) const
@@ -165,8 +163,6 @@ synapse_by_label( const string& label) const
-
-
// ----- registering units with core lists
void
cnrun::CModel::
@@ -174,8 +170,8 @@ _include_base_unit( C_BaseUnit* u)
{
if ( any_of( units.begin(), units.end(),
bind(equal_to<C_BaseUnit*>(), placeholders::_1, u)) )
- fprintf( stderr, "Unit %s found already included in model %s\n",
- u->_label, name.c_str());
+ vp( 1, stderr, "Unit %s found already included in model %s\n",
+ u->_label, name.c_str());
else
units.push_back( u);
@@ -186,8 +182,8 @@ _include_base_unit( C_BaseUnit* u)
if ( u->is_listening() ) {
if ( count( listening_units.begin(), listening_units.end(), u) )
- fprintf( stderr, "Unit \"%s\" already on listening list\n",
- u->_label);
+ vp( 1, stderr, "Unit \"%s\" already on listening list\n",
+ u->_label);
else
listening_units.push_back( u);
}
@@ -334,12 +330,14 @@ exclude_unit( C_BaseUnit *u, const TExcludeOption option)
(_var_cnt - our_idx - u->v_no()) * sizeof(double));
V.resize( _var_cnt -= u->v_no());
}
+
if ( u->is_ddtbound() ) {
if ( u->is_neuron() )
ddtbound_neurons.erase( find( ddtbound_neurons.begin(), ddtbound_neurons.end(), u));
else
ddtbound_synapses.erase( find( ddtbound_synapses.begin(), ddtbound_synapses.end(), u));
}
+
if ( !u->is_hostable() ) {
if ( u->is_neuron() )
standalone_neurons.remove(
@@ -411,18 +409,13 @@ unregister_spikelogger( C_BaseNeuron *n)
-
-
-
-
-
// units with sources
void
cnrun::CModel::
register_unit_with_sources( C_BaseUnit *u)
{
- for ( auto& I : u->sources )
+ for ( auto& I : u->_sources )
if ( I.source->is_periodic() )
units_with_periodic_sources.push_back( u);
else
@@ -545,7 +538,7 @@ add_synapse_species( const string& type_s,
TUnitType ytype = unit_species_by_string( type_s);
bool given_species = true;
if ( ytype == NT_VOID && (given_species = false, ytype = unit_family_by_string( type_s)) == NT_VOID ) {
- fprintf( stderr, "Unrecognised synapse species or family: \"%s\"\n", type_s.c_str());
+ vp( 0, stderr, "Unrecognised synapse species or family: \"%s\"\n", type_s.c_str());
return nullptr;
}
@@ -553,7 +546,7 @@ add_synapse_species( const string& type_s,
*src = neuron_by_label( src_l),
*tgt = neuron_by_label( tgt_l);
if ( !src || !tgt ) {
- fprintf( stderr, "Phoney source (\"%s\") or target (\"%s\")\n", src_l.c_str(), tgt_l.c_str());
+ vp( 0, stderr, "Phony source (\"%s\") or target (\"%s\")\n", src_l.c_str(), tgt_l.c_str());
return nullptr;
}
@@ -620,12 +613,12 @@ add_synapse_species( const string& type_s,
else
ytype = YT_MAP;
else {
- fprintf( stderr, "Map synapses can only connect Map neurons\n");
+ vp( 0, stderr, "Map synapses can only connect Map neurons\n");
return nullptr;
}
break;
default:
- printf( "Teleporting is fun!\n");
+ vp( 0, stderr, "Bad synapse type: %s\n", type_s.c_str());
return nullptr;
}
@@ -664,12 +657,12 @@ add_synapse_species( TUnitType ytype,
case YT_AB_RD:
if ( src->traits() & UT_RATEBASED && !(tgt->traits() & UT_RATEBASED) && !(src->traits() & UT_DOT) )
// y = new CSynapseAB_rd( synapse_id, src, tgt, this, CN_UOWNED, false);
- fprintf( stderr, "AB_rd not implemented\n");
+ throw "AB_rd not implemented";
break;
case YT_AB_DR:
if ( !(src->traits() & UT_RATEBASED) && tgt->traits() & UT_RATEBASED && !(src->traits() & UT_DOT) )
// y = new CSynapseAB_rr( synapse_id, src, tgt, this, CN_UOWNED, false);
- fprintf( stderr, "AB_dr not implemented\n");
+ throw "AB_dr not implemented";
break;
case YT_AB_DD:
if ( !(src->traits() & UT_RATEBASED) && !(tgt->traits() & UT_RATEBASED) && !(src->traits() & UT_DOT) )
@@ -688,17 +681,17 @@ add_synapse_species( TUnitType ytype,
case YT_ABMINUS_RR:
if ( src->traits() & UT_RATEBASED && tgt->traits() & UT_RATEBASED && !(src->traits() & UT_DOT) )
// y = new CSynapseABMINUS_rr( src, tgt, g, this, CN_UOWNED, include_option);
- fprintf( stderr, "ABMINUS_rr not implemented\n");
+ throw "ABMINUS_rr not implemented";
break;
case YT_ABMINUS_RD:
if ( src->traits() & UT_RATEBASED && !(tgt->traits() & UT_RATEBASED) && !(src->traits() & UT_DOT) )
// y = new CSynapseABMINUS_rd( synapse_id, src, tgt, this, CN_UOWNED, false);
- fprintf( stderr, "ABMINUS_rd not implemented\n");
+ throw "ABMINUS_rd not implemented";
break;
case YT_ABMINUS_DR:
if ( !(src->traits() & UT_RATEBASED) && tgt->traits() & UT_RATEBASED && !(src->traits() & UT_DOT) )
// y = new CSynapseABMINUS_rr( synapse_id, src, tgt, this, CN_UOWNED, false);
- fprintf( stderr, "ABMINUS_dr not implemented\n");
+ throw "ABMINUS_dr not implemented";
break;
case YT_ABMINUS_DD:
if ( !(src->traits() & UT_RATEBASED) && !(tgt->traits() & UT_RATEBASED) && !(src->traits() & UT_DOT) )
@@ -707,29 +700,29 @@ add_synapse_species( TUnitType ytype,
case YT_MXABMINUS_DR:
if ( !(src->traits() & UT_RATEBASED) && tgt->traits() & UT_RATEBASED && src->traits() & UT_DOT )
// y = new CSynapseMxABMinus_dr( src, tgt, g, this, CN_UOWNED, include_option);
- fprintf( stderr, "MxABMinus_dr not implemented\n");
+ throw "MxABMinus_dr not implemented";
break;
case YT_MXABMINUS_DD:
if ( !(src->traits() & UT_RATEBASED) && !(tgt->traits() & UT_RATEBASED) && src->traits() & UT_DOT )
// y = new CSynapseMxABMinus_dd( src, tgt, g, this, CN_UOWNED, include_option);
- fprintf( stderr, "MxABMinus_dd not implemented\n");
+ throw "MxABMinus_dd not implemented";
break;
case YT_RALL_RR:
if ( src->traits() & UT_RATEBASED && tgt->traits() & UT_RATEBASED && !(src->traits() & UT_DOT) )
// y = new CSynapseRall_rr( src, tgt, g, this, CN_UOWNED, include_option);
- fprintf( stderr, "Rall_rr not implemented\n");
+ throw "Rall_rr not implemented";
break;
case YT_RALL_RD:
if ( src->traits() & UT_RATEBASED && !(tgt->traits() & UT_RATEBASED) && !(src->traits() & UT_DOT) )
// y = new CSynapseRall_rd( synapse_id, src, tgt, this, CN_UOWNED, false);
- fprintf( stderr, "Rall_rd not implemented\n");
+ throw "Rall_rd not implemented";
break;
case YT_RALL_DR:
if ( !(src->traits() & UT_RATEBASED) && tgt->traits() & UT_RATEBASED && !(src->traits() & UT_DOT) )
// y = new CSynapseRall_rr( synapse_id, src, tgt, this, CN_UOWNED, false);
- fprintf( stderr, "Rall_dr not implemented\n");
+ throw "Rall_dr not implemented";
break;
case YT_RALL_DD:
if ( !(src->traits() & UT_RATEBASED) && !(tgt->traits() & UT_RATEBASED) && !(src->traits() & UT_DOT) )
@@ -738,12 +731,12 @@ add_synapse_species( TUnitType ytype,
case YT_MXRALL_DR:
if ( !(src->traits() & UT_RATEBASED) && tgt->traits() & UT_RATEBASED && src->traits() & UT_DOT )
// y = new CSynapseMxRall_dr( src, tgt, g, this, CN_UOWNED, include_option);
- fprintf( stderr, "MxRall_dr not implemented\n");
+ throw "MxRall_dr not implemented";
break;
case YT_MXRALL_DD:
if ( !(src->traits() & UT_RATEBASED) && !(tgt->traits() & UT_RATEBASED) && src->traits() & UT_DOT )
// y = new CSynapseMxRall_dd( src, tgt, g, this, CN_UOWNED, include_option);
- fprintf( stderr, "MxRall_dd not implemented\n");
+ throw "MxRall_dd not implemented";
break;
@@ -754,7 +747,7 @@ add_synapse_species( TUnitType ytype,
else
y = new CSynapseMap( src, tgt, g, this, CN_UOWNED);
else
- fprintf( stderr, "Map synapses can only connect Map neurons\n");
+ throw "Map synapses can only connect Map neurons";
break;
default:
@@ -892,315 +885,6 @@ reset_state_all_units()
-// tags
-
-int
-cnrun::CModel::
-process_listener_tags( const list<STagGroupListener> &Listeners)
-{
- regex_t RE;
- for ( auto& P : Listeners ) {
- if (0 != regcomp( &RE, P.pattern.c_str(), REG_EXTENDED | REG_NOSUB)) {
- fprintf( stderr, "Invalid regexp in process_listener_tags: \"%s\"\n", P.pattern.c_str());
- continue;
- }
- for ( auto& Ui : units ) {
- auto& U = *Ui;
- if ( regexec( &RE, U._label, 0, 0, 0) == 0 ) {
- if ( P.enable ) {
- U.start_listening( P.bits);
- vp( 3, " (unit \"%s\" listening%s)\n",
- U._label, P.bits & CN_ULISTENING_1VARONLY ? ", to one var only" :"");
- } else {
- U.stop_listening();
- vp( 3, " (unit \"%s\" not listening)\n", U._label);
- }
- }
- }
- }
-
- return 0;
-}
-
-
-int
-cnrun::CModel::
-process_spikelogger_tags( const list<STagGroupSpikelogger> &Spikeloggers)
-{
- regex_t RE;
- for ( auto& P : Spikeloggers ) {
- if (0 != regcomp( &RE, P.pattern.c_str(), REG_EXTENDED | REG_NOSUB)) {
- fprintf( stderr, "Invalid regexp in process_spikelogger_tags: \"%s\"\n", P.pattern.c_str());
- continue;
- }
- for ( auto& Ni : standalone_neurons ) {
- auto& N = *Ni;
- if ( regexec( &RE, N._label, 0, 0, 0) == 0 ) {
- if ( P.enable ) {
- bool log_sdf = !(P.period == 0. || P.sigma == 0.);
- if ( ( log_sdf && !N.enable_spikelogging_service(
- P.period, P.sigma, P.from))
- or
- (!log_sdf && !N.enable_spikelogging_service()) ) {
- fprintf( stderr, "Cannot have \"%s\" log spikes because it is not a conductance-based neuron (of type %s)\n",
- N._label, N.species());
- return -1;
- }
- } else
- N.disable_spikelogging_service();
-
- vp( 3, " (%sabling spike logging for standalone neuron \"%s\")\n",
- P.enable ? "en" : "dis", N._label);
- }
- }
- for ( auto& Ni : hosted_neurons ) {
- auto& N = *Ni;
- if ( regexec( &RE, N._label, 0, 0, 0) == 0 ) {
- if ( P.enable ) {
- bool log_sdf = !(P.period == 0. || P.sigma == 0.);
- if ( ( log_sdf && !N.enable_spikelogging_service( P.period, P.sigma, P.from))
- or
- (!log_sdf && !N.enable_spikelogging_service()) ) {
- fprintf( stderr, "Cannot have \"%s\" log spikes because it is not a conductance-based neuron (of type %s)\n",
- N._label, N.species());
- return -1;
- }
- } else
- N.disable_spikelogging_service();
-
- vp( 3, " (%sabling spike logging for hosted neuron \"%s\")\n",
- P.enable ? "en" : "dis", N._label);
- }
- }
- }
-
- return 0;
-}
-
-
-int
-cnrun::CModel::
-process_putout_tags( const list<STagGroup> &ToRemove)
-{
- // execute some
- regex_t RE;
- for ( auto& P : ToRemove ) {
- if (0 != regcomp( &RE, P.pattern.c_str(), REG_EXTENDED | REG_NOSUB)) {
- fprintf( stderr, "Invalid regexp in process_putout_tags: \"%s\"\n", P.pattern.c_str());
- continue;
- }
- auto Ui = units.rbegin();
- while ( Ui != units.rend() ) {
- ++Ui;
- auto& U = **prev(Ui);
- if ( regexec( &RE, U._label, 0, 0, 0) == 0 ) {
- vp( 2, " (put out unit \"%s\")\n", U._label);
- delete &U;
- }
- }
- }
-
- cull_blind_synapses();
-
- return 0;
-}
-
-
-int
-cnrun::CModel::
-process_decimate_tags( const list<STagGroupDecimate> &ToDecimate)
-{
- // decimate others
- regex_t RE;
- for ( auto& P : ToDecimate ) {
- if (0 != regcomp( &RE, P.pattern.c_str(), REG_EXTENDED | REG_NOSUB)) {
- fprintf( stderr, "Invalid regexp in process_decimate_tags: \"%s\"\n", P.pattern.c_str());
- continue;
- }
-
- // collect group
- vector<C_BaseUnit*> dcmgroup;
- for ( auto& U : units )
- if ( regexec( &RE, U->_label, 0, 0, 0) == 0 )
- dcmgroup.push_back( U);
- random_shuffle( dcmgroup.begin(), dcmgroup.end());
-
- // execute
- size_t to_execute = rint( dcmgroup.size() * P.fraction), n = to_execute;
- while ( n-- )
- delete dcmgroup[n];
-
- vp( 3, " (decimated %4.1f%% (%zu units) of %s)\n",
- P.fraction*100, to_execute, P.pattern.c_str());
-
- }
-
- cull_blind_synapses();
-
- return 0;
-}
-
-
-
-
-
-
-int
-cnrun::CModel::
-process_paramset_static_tags( const list<STagGroupNeuronParmSet> &tags)
-{
- regex_t RE;
- for ( auto& P : tags ) {
- if (0 != regcomp( &RE, P.pattern.c_str(), REG_EXTENDED | REG_NOSUB)) {
- fprintf( stderr, "Invalid regexp in process_paramset_static_tags: \"%s\"\n", P.pattern.c_str());
- continue;
- }
-
- vector<string> current_tag_assigned_labels;
-
- for ( auto& Ui : units ) {
- if ( not Ui->is_neuron() )
- continue;
- auto& N = *static_cast<C_BaseNeuron*>(Ui);
- if ( regexec( &RE, N.label(), 0, 0, 0) == REG_NOMATCH )
- continue;
- // because a named parameter can map to a different param_id in different units, rather
- // do lookup every time
-
- int p_d = -1;
- C_BaseUnit::TSinkType kind = (C_BaseUnit::TSinkType)-1;
- if ( (p_d = N.param_idx_by_sym( P.parm)) != -1 )
- kind = C_BaseUnit::SINK_PARAM;
- else if ( (p_d = N.var_idx_by_sym( P.parm)) != -1 )
- kind = C_BaseUnit::SINK_VAR;
- if ( p_d == -1 ) {
- fprintf( stderr, "%s \"%s\" (type \"%s\") has no parameter or variable named \"%s\"\n",
- N.class_name(), N.label(), N.species(), P.parm.c_str());
- continue;
- }
-
- switch ( kind ) {
- case C_BaseUnit::SINK_PARAM:
- N.param_value(p_d) = P.enable ? P.value : __CNUDT[N.type()].stock_param_values[p_d];
- N.param_changed_hook();
- break;
- case C_BaseUnit::SINK_VAR:
- N.var_value(p_d) = P.value;
- break;
- }
-
- current_tag_assigned_labels.push_back( N.label());
- }
-
- if ( current_tag_assigned_labels.empty() ) {
- fprintf( stderr, "No neuron labelled matching \"%s\"\n", P.pattern.c_str());
- continue;
- }
-
- vp( 3, " set [%s]{%s} = %g\n",
- join(current_tag_assigned_labels, ", ").c_str(),
- P.parm.c_str(), P.value);
- }
- return 0;
-}
-
-
-
-
-
-int
-cnrun::CModel::
-process_paramset_static_tags( const list<STagGroupSynapseParmSet> &tags)
-{
- auto process_tag = [&] (const STagGroupSynapseParmSet& P,
- regex_t& REsrc, regex_t& REtgt) -> void {
- vector<string> current_tag_assigned_labels;
-
- bool do_gsyn = (P.parm == "gsyn");
-
- vp( 5, "== setting %s -> %s {%s} = %g...\n",
- P.pattern.c_str(), P.target.c_str(), P.parm.c_str(), P.value);
-
- for ( auto& Uai : units ) {
- if ( not Uai->is_neuron() )
- continue;
- if ( regexec( &REsrc, Uai->label(), 0, 0, 0) == REG_NOMATCH )
- continue;
- auto& Ua = *static_cast<C_BaseNeuron*>(Uai);
-
- for ( auto& Ubi : units ) {
- if ( not Ubi->is_neuron() )
- continue;
- if ( regexec( &REtgt, Ubi->label(), 0, 0, 0) == REG_NOMATCH ) /* || Ua == Ub */
- continue;
- auto& Ub = *static_cast<C_BaseNeuron*>(Ubi);
- auto y = Ua.connects_via(Ub);
- if ( !y )
- continue;
-
- if ( do_gsyn ) {
- y->set_g_on_target( Ub, P.value);
- current_tag_assigned_labels.push_back( y->label());
- continue;
- }
-
- int p_d = -1;
- C_BaseUnit::TSinkType kind = (C_BaseUnit::TSinkType)-1;
- if ( (p_d = y->param_idx_by_sym( P.parm)) > -1 )
- kind = C_BaseUnit::SINK_PARAM;
- else if ( (p_d = y->var_idx_by_sym( P.parm)) > -1 )
- kind = C_BaseUnit::SINK_VAR;
- if ( p_d == -1 ) {
- fprintf( stderr, "%s \"%s\" (type \"%s\") has no parameter or variable named \"%s\"\n",
- y->class_name(), y->label(), y->species(), P.parm.c_str());
- continue;
- }
-
- switch ( kind ) {
- case C_BaseUnit::SINK_PARAM:
- if ( y->_targets.size() > 1 )
- y = y->make_clone_independent(
- &Ub); // lest brethren synapses to other targets be clobbered
- y->param_value(p_d) = P.enable ? P.value : __CNUDT[y->type()].stock_param_values[p_d];
- y->param_changed_hook();
- break;
- case C_BaseUnit::SINK_VAR:
- y->var_value(p_d) = P.value;
- break;
- }
-
- current_tag_assigned_labels.push_back( y->label());
- }
- }
- if ( current_tag_assigned_labels.empty() ) {
- fprintf( stderr, "No synapse connecting any of \"%s\" to \"%s\"\n", P.pattern.c_str(), P.target.c_str());
- return;
- }
-
- vp( 3, " set [%s]{%s} = %g\n",
- join(current_tag_assigned_labels, ", ").c_str(),
- P.parm.c_str(), P.value);
- };
-
- for ( auto& P : tags ) {
- regex_t REsrc, REtgt;
- if (0 != regcomp( &REsrc, P.pattern.c_str(), REG_EXTENDED | REG_NOSUB) ) { // P->pattern acting as src
- fprintf( stderr, "Invalid regexp in process_paramset_static_tags (src): \"%s\"\n", P.pattern.c_str());
- continue;
- }
- if (0 != regcomp( &REtgt, P.target.c_str(), REG_EXTENDED | REG_NOSUB) ) {
- fprintf( stderr, "Invalid regexp in process_paramset_static_tags (tgt): \"%s\"\n", P.target.c_str());
- continue;
- }
-
- process_tag( P, REsrc, REtgt);
- }
-
- coalesce_synapses();
-
- return 0;
-}
-
void
cnrun::CModel::
@@ -1235,49 +919,6 @@ startover:
-int
-cnrun::CModel::
-process_paramset_source_tags( const list<STagGroupSource> &tags)
-{
- regex_t RE;
- for ( auto& P : tags ) {
- if (0 != regcomp( &RE, P.pattern.c_str(), REG_EXTENDED | REG_NOSUB)) {
- fprintf( stderr, "Invalid regexp in process_paramset_source_tags: \"%s\"\n", P.pattern.c_str());
- continue;
- }
-
- for ( auto& U : units ) {
- if ( regexec( &RE, U->label(), 0, 0, 0) == REG_NOMATCH )
- continue;
-
- int p_d = -1;
- C_BaseUnit::TSinkType kind = (C_BaseUnit::TSinkType)-1;
- if ( (p_d = U->param_idx_by_sym( P.parm)) > -1 )
- kind = C_BaseUnit::SINK_PARAM;
- else if ( (p_d = U->var_idx_by_sym( P.parm)) > -1 )
- kind = C_BaseUnit::SINK_VAR;
- if ( p_d == -1 ) {
- fprintf( stderr, "%s \"%s\" (type \"%s\") has no parameter or variable named \"%s\"\n",
- U->class_name(), U->label(), U->species(), P.parm.c_str());
- continue;
- }
-
- if ( P.enable ) {
- U -> attach_source( P.source, kind, p_d);
- vp( 3, "Connected source \"%s\" to \"%s\"{%s}\n",
- P.source->name.c_str(), U->label(), P.parm.c_str());
- } else {
- U -> detach_source( P.source, kind, p_d);
- vp( 3, "Disconnected source \"%s\" from \"%s\"{%s}\n",
- P.source->name.c_str(), U->label(), P.parm.c_str());
- }
- }
- }
-
- return 0;
-}
-
-
inline const char*
diff --git a/upstream/src/libcn/model-tags.cc b/upstream/src/libcn/model-tags.cc
new file mode 100644
index 0000000..eab56f2
--- /dev/null
+++ b/upstream/src/libcn/model-tags.cc
@@ -0,0 +1,422 @@
+/*
+ * File name: libcn/model-tags.cc
+ * Project: cnrun
+ * Author: Andrei Zavada <johnhommer at gmail.com>
+ * building on original work by Thomas Nowotny
+ * Initial version: 2014-09-25
+ *
+ * Purpose: CModel household (process_*_tags(), and other methods using regexes).
+ *
+ * License: GPL
+ */
+
+#if HAVE_CONFIG_H && !defined(VERSION)
+# include "config.h"
+#endif
+
+#include <regex.h>
+
+#include "libstilton/string.hh"
+#include "model.hh"
+
+
+using namespace std;
+
+vector<cnrun::C_BaseUnit*>
+cnrun::CModel::
+list_units( const string& label) const
+{
+ vector<C_BaseUnit*> Q;
+
+ regex_t RE;
+ if ( 0 != regcomp( &RE, label.c_str(), REG_EXTENDED | REG_NOSUB)) {
+ vp( 0, stderr, "Invalid regexp in list_units: \"%s\"\n", label.c_str());
+ return Q;
+ }
+
+ for ( auto& U : units )
+ if ( regexec( &RE, U->label(), 0, 0, 0) != REG_NOMATCH )
+ Q.push_back(U);
+
+ return Q;
+}
+
+
+// tags
+
+size_t
+cnrun::CModel::
+process_listener_tags( const list<STagGroupListener> &Listeners)
+{
+ size_t count = 0;
+ regex_t RE;
+ for ( auto& P : Listeners ) {
+ if (0 != regcomp( &RE, P.pattern.c_str(), REG_EXTENDED | REG_NOSUB)) {
+ vp( 0, stderr, "Invalid regexp in process_listener_tags: \"%s\"\n", P.pattern.c_str());
+ continue;
+ }
+ for ( auto& Ui : units ) {
+ auto& U = *Ui;
+ if ( regexec( &RE, U._label, 0, 0, 0) == 0 ) {
+ if ( P.invert_option == STagGroup::TInvertOption::no ) {
+ U.start_listening( P.bits);
+ vp( 3, " (unit \"%s\" listening%s)\n",
+ U._label, P.bits & CN_ULISTENING_1VARONLY ? ", to one var only" :"");
+ } else {
+ U.stop_listening();
+ vp( 3, " (unit \"%s\" not listening)\n", U._label);
+ }
+ ++count;
+ }
+ }
+ }
+
+ return count;
+}
+
+
+size_t
+cnrun::CModel::
+process_spikelogger_tags( const list<STagGroupSpikelogger> &Spikeloggers)
+{
+ size_t count = 0;
+ regex_t RE;
+ for ( auto& P : Spikeloggers ) {
+ if (0 != regcomp( &RE, P.pattern.c_str(), REG_EXTENDED | REG_NOSUB)) {
+ vp( 0, stderr, "Invalid regexp in process_spikelogger_tags: \"%s\"\n", P.pattern.c_str());
+ continue;
+ }
+ for ( auto& Ni : standalone_neurons ) {
+ auto& N = *Ni;
+ if ( regexec( &RE, N._label, 0, 0, 0) == 0 ) {
+ if ( P.invert_option == STagGroup::TInvertOption::no ) {
+ bool log_sdf = !(P.period == 0. || P.sigma == 0.);
+ if ( ( log_sdf && !N.enable_spikelogging_service(
+ P.period, P.sigma, P.from))
+ or
+ (!log_sdf && !N.enable_spikelogging_service()) ) {
+ vp( 0, stderr, "Cannot have \"%s\" log spikes because it is not a conductance-based neuron (of type %s)\n",
+ N._label, N.species());
+ continue;
+ }
+ } else
+ N.disable_spikelogging_service();
+ ++count;
+
+ vp( 3, " (%sabling spike logging for standalone neuron \"%s\")\n",
+ (P.invert_option == STagGroup::TInvertOption::no) ? "en" : "dis", N._label);
+ }
+ }
+ for ( auto& Ni : hosted_neurons ) {
+ auto& N = *Ni;
+ if ( regexec( &RE, N._label, 0, 0, 0) == 0 ) {
+ if ( P.invert_option == STagGroup::TInvertOption::no ) {
+ bool log_sdf = !(P.period == 0. || P.sigma == 0.);
+ if ( ( log_sdf && !N.enable_spikelogging_service( P.period, P.sigma, P.from))
+ or
+ (!log_sdf && !N.enable_spikelogging_service()) ) {
+ vp( 1, stderr, "Cannot have \"%s\" log spikes because it is not a conductance-based neuron (of type %s)\n",
+ N._label, N.species());
+ return -1;
+ }
+ } else
+ N.disable_spikelogging_service();
+ ++count;
+
+ vp( 3, " (%sabling spike logging for hosted neuron \"%s\")\n",
+ (P.invert_option == STagGroup::TInvertOption::no) ? "en" : "dis", N._label);
+ }
+ }
+ }
+
+ return count;
+}
+
+
+size_t
+cnrun::CModel::
+process_putout_tags( const list<STagGroup> &ToRemove)
+{
+ size_t count = 0;
+ // execute some
+ regex_t RE;
+ for ( auto& P : ToRemove ) {
+ if (0 != regcomp( &RE, P.pattern.c_str(), REG_EXTENDED | REG_NOSUB)) {
+ vp( 0, stderr, "Invalid regexp in process_putout_tags: \"%s\"\n", P.pattern.c_str());
+ continue;
+ }
+ auto Ui = units.rbegin();
+ while ( Ui != units.rend() ) {
+ ++Ui;
+ auto& U = **prev(Ui);
+ if ( regexec( &RE, U._label, 0, 0, 0) == 0 ) {
+ vp( 2, " (put out unit \"%s\")\n", U._label);
+ delete &U;
+ ++count;
+ }
+ }
+ }
+
+ cull_blind_synapses();
+
+ return count;
+}
+
+
+size_t
+cnrun::CModel::
+process_decimate_tags( const list<STagGroupDecimate> &ToDecimate)
+{
+ size_t count = 0;
+ // decimate others
+ regex_t RE;
+ for ( auto& P : ToDecimate ) {
+ if (0 != regcomp( &RE, P.pattern.c_str(), REG_EXTENDED | REG_NOSUB)) {
+ vp( 0, stderr, "Invalid regexp in process_decimate_tags: \"%s\"\n", P.pattern.c_str());
+ continue;
+ }
+
+ // collect group
+ vector<C_BaseUnit*> dcmgroup;
+ for ( auto& U : units )
+ if ( regexec( &RE, U->_label, 0, 0, 0) == 0 )
+ dcmgroup.push_back( U);
+ random_shuffle( dcmgroup.begin(), dcmgroup.end());
+
+ // execute
+ size_t to_execute = rint( dcmgroup.size() * P.fraction), n = to_execute;
+ while ( n-- ) {
+ delete dcmgroup[n];
+ ++count;
+ }
+
+ vp( 3, " (decimated %4.1f%% (%zu units) of %s)\n",
+ P.fraction*100, to_execute, P.pattern.c_str());
+
+ }
+
+ cull_blind_synapses();
+
+ return count;
+}
+
+
+
+
+
+
+size_t
+cnrun::CModel::
+process_paramset_static_tags( const list<STagGroupNeuronParmSet> &tags)
+{
+ size_t count = 0;
+ regex_t RE;
+ for ( auto& P : tags ) {
+ if (0 != regcomp( &RE, P.pattern.c_str(), REG_EXTENDED | REG_NOSUB)) {
+ vp( 0, stderr, "Invalid regexp in process_paramset_static_tags: \"%s\"\n", P.pattern.c_str());
+ continue;
+ }
+
+ vector<string> current_tag_assigned_labels;
+
+ for ( auto& Ui : units ) {
+ if ( not Ui->is_neuron() )
+ continue;
+ auto& N = *static_cast<C_BaseNeuron*>(Ui);
+ if ( regexec( &RE, N.label(), 0, 0, 0) == REG_NOMATCH )
+ continue;
+ // because a named parameter can map to a different param_id in different units, rather
+ // do lookup every time
+
+ int p_d = -1;
+ C_BaseUnit::TSinkType kind = (C_BaseUnit::TSinkType)-1;
+ if ( (p_d = N.param_idx_by_sym( P.parm)) != -1 )
+ kind = C_BaseUnit::SINK_PARAM;
+ else if ( (p_d = N.var_idx_by_sym( P.parm)) != -1 )
+ kind = C_BaseUnit::SINK_VAR;
+ if ( p_d == -1 ) {
+ vp( 1, stderr, "%s \"%s\" (type \"%s\") has no parameter or variable named \"%s\"\n",
+ N.class_name(), N.label(), N.species(), P.parm.c_str());
+ continue;
+ }
+
+ switch ( kind ) {
+ case C_BaseUnit::SINK_PARAM:
+ N.param_value(p_d) = (P.invert_option == STagGroup::TInvertOption::no)
+ ? P.value : __CNUDT[N.type()].stock_param_values[p_d];
+ N.param_changed_hook();
+ break;
+ case C_BaseUnit::SINK_VAR:
+ N.var_value(p_d) = P.value;
+ break;
+ }
+ ++count;
+
+ current_tag_assigned_labels.push_back( N.label());
+ }
+
+ if ( current_tag_assigned_labels.empty() ) {
+ vp( 1, stderr, "No neuron labelled matching \"%s\"\n", P.pattern.c_str());
+ continue;
+ }
+
+ vp( 3, " set [%s]{%s} = %g\n",
+ stilton::str::join(current_tag_assigned_labels, ", ").c_str(),
+ P.parm.c_str(), P.value);
+ }
+
+ return count;
+}
+
+
+
+
+
+size_t
+cnrun::CModel::
+process_paramset_static_tags( const list<STagGroupSynapseParmSet> &tags)
+{
+ size_t count = 0;
+ auto process_tag = [&] (const STagGroupSynapseParmSet& P,
+ regex_t& REsrc, regex_t& REtgt) -> void {
+ vector<string> current_tag_assigned_labels;
+
+ bool do_gsyn = (P.parm == "gsyn");
+
+ vp( 5, "== setting %s -> %s {%s} = %g...\n",
+ P.pattern.c_str(), P.target.c_str(), P.parm.c_str(), P.value);
+
+ for ( auto& Uai : units ) {
+ if ( not Uai->is_neuron() )
+ continue;
+ if ( regexec( &REsrc, Uai->label(), 0, 0, 0) == REG_NOMATCH )
+ continue;
+ auto& Ua = *static_cast<C_BaseNeuron*>(Uai);
+
+ for ( auto& Ubi : units ) {
+ if ( not Ubi->is_neuron() )
+ continue;
+ if ( regexec( &REtgt, Ubi->label(), 0, 0, 0) == REG_NOMATCH ) /* || Ua == Ub */
+ continue;
+ auto& Ub = *static_cast<C_BaseNeuron*>(Ubi);
+ auto y = Ua.connects_via(Ub);
+ if ( !y )
+ continue;
+
+ if ( do_gsyn ) {
+ y->set_g_on_target( Ub, P.value);
+ current_tag_assigned_labels.push_back( y->label());
+ continue;
+ }
+
+ int p_d = -1;
+ C_BaseUnit::TSinkType kind = (C_BaseUnit::TSinkType)-1;
+ if ( (p_d = y->param_idx_by_sym( P.parm)) > -1 )
+ kind = C_BaseUnit::SINK_PARAM;
+ else if ( (p_d = y->var_idx_by_sym( P.parm)) > -1 )
+ kind = C_BaseUnit::SINK_VAR;
+ if ( p_d == -1 ) {
+ vp( 1, stderr, "%s \"%s\" (type \"%s\") has no parameter or variable named \"%s\"\n",
+ y->class_name(), y->label(), y->species(), P.parm.c_str());
+ continue;
+ }
+
+ switch ( kind ) {
+ case C_BaseUnit::SINK_PARAM:
+ if ( y->_targets.size() > 1 )
+ y = y->make_clone_independent(
+ &Ub); // lest brethren synapses to other targets be clobbered
+ y->param_value(p_d) = (P.invert_option == STagGroup::TInvertOption::no)
+ ? P.value : __CNUDT[y->type()].stock_param_values[p_d];
+ y->param_changed_hook();
+ break;
+ case C_BaseUnit::SINK_VAR:
+ y->var_value(p_d) = P.value;
+ break;
+ }
+ ++count;
+
+ current_tag_assigned_labels.push_back( y->label());
+ }
+ }
+ if ( current_tag_assigned_labels.empty() ) {
+ vp( 1, stderr, "No synapse connecting any of \"%s\" to \"%s\"\n", P.pattern.c_str(), P.target.c_str());
+ return;
+ }
+
+ vp( 3, " set [%s]{%s} = %g\n",
+ stilton::str::join(current_tag_assigned_labels, ", ").c_str(),
+ P.parm.c_str(), P.value);
+ };
+
+ for ( auto& P : tags ) {
+ regex_t REsrc, REtgt;
+ if (0 != regcomp( &REsrc, P.pattern.c_str(), REG_EXTENDED | REG_NOSUB) ) { // P->pattern acting as src
+ vp( 0, stderr, "Invalid regexp in process_paramset_static_tags (src): \"%s\"\n", P.pattern.c_str());
+ continue;
+ }
+ if (0 != regcomp( &REtgt, P.target.c_str(), REG_EXTENDED | REG_NOSUB) ) {
+ vp( 0, stderr, "Invalid regexp in process_paramset_static_tags (tgt): \"%s\"\n", P.target.c_str());
+ continue;
+ }
+
+ process_tag( P, REsrc, REtgt);
+ }
+
+ coalesce_synapses();
+
+ return count;
+}
+
+
+
+size_t
+cnrun::CModel::
+process_paramset_source_tags( const list<STagGroupSource> &tags)
+{
+ size_t count = 0;
+ regex_t RE;
+ for ( auto& P : tags ) {
+ if (0 != regcomp( &RE, P.pattern.c_str(), REG_EXTENDED | REG_NOSUB)) {
+ vp( 0, stderr, "Invalid regexp in process_paramset_source_tags: \"%s\"\n", P.pattern.c_str());
+ continue;
+ }
+
+ for ( auto& U : units ) {
+ if ( regexec( &RE, U->label(), 0, 0, 0) == REG_NOMATCH )
+ continue;
+
+ int p_d = -1;
+ C_BaseUnit::TSinkType kind = (C_BaseUnit::TSinkType)-1;
+ if ( (p_d = U->param_idx_by_sym( P.parm)) > -1 )
+ kind = C_BaseUnit::SINK_PARAM;
+ else if ( (p_d = U->var_idx_by_sym( P.parm)) > -1 )
+ kind = C_BaseUnit::SINK_VAR;
+ if ( p_d == -1 ) {
+ vp( 1, stderr, "%s \"%s\" (type \"%s\") has no parameter or variable named \"%s\"\n",
+ U->class_name(), U->label(), U->species(), P.parm.c_str());
+ continue;
+ }
+
+ if ( P.invert_option == STagGroup::TInvertOption::no ) {
+ U -> attach_source( P.source, kind, p_d);
+ vp( 3, "Connected source \"%s\" to \"%s\"{%s}\n",
+ P.source->name(), U->label(), P.parm.c_str());
+ } else {
+ U -> detach_source( P.source, kind, p_d);
+ vp( 3, "Disconnected source \"%s\" from \"%s\"{%s}\n",
+ P.source->name(), U->label(), P.parm.c_str());
+ }
+ ++count;
+ }
+ }
+
+ return count;
+}
+
+// Local Variables:
+// Mode: c++
+// indent-tabs-mode: nil
+// tab-width: 8
+// c-basic-offset: 8
+// End:
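The tag-processing entry points moved here keep their regex semantics but now report how many units they touched and take a TInvertOption instead of a bare bool. A hedged usage sketch, assuming a populated CModel and unit labels ("^PN") invented purely for illustration:

        #include <list>
        #include "model.hh"

        size_t toggle_listening( cnrun::CModel& model)
        {
                std::list<cnrun::CModel::STagGroupListener> tags;
                tags.emplace_back( "^PN", CN_ULISTENING_1VARONLY);  // start listening
                tags.emplace_back( "^PN0$", 0,
                                   cnrun::CModel::STagGroup::TInvertOption::yes);  // stop for PN0
                return model.process_listener_tags( tags);  // now a count, not 0/-1
        }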
diff --git a/upstream/src/libcn/model.hh b/upstream/src/libcn/model.hh
index d2a29b2..faf109c 100644
--- a/upstream/src/libcn/model.hh
+++ b/upstream/src/libcn/model.hh
@@ -1,12 +1,12 @@
/*
- * Author: Andrei Zavada <johnhommer at gmail.com>
- * building on original work by Thomas Nowotny
+ * File name: libcn/model.hh
+ * Project: cnrun
+ * Author: Andrei Zavada <johnhommer at gmail.com>
+ * Initial version: 2008-09-02
*
- * License: GPL-2+
+ * Purpose: Main model class.
*
- * Initial version: 2008-09-02
- *
- * Class CModel
+ * License: GPL
*/
/*--------------------------------------------------------------------------
@@ -23,6 +23,10 @@ parameters.
#ifndef CNRUN_LIBCN_MODEL_H_
#define CNRUN_LIBCN_MODEL_H_
+#if HAVE_CONFIG_H && !defined(VERSION)
+# include "config.h"
+#endif
+
#include <list>
#include <vector>
#include <string>
@@ -43,10 +47,6 @@ parameters.
#include "standalone-synapses.hh"
#include "integrate-rk65.hh"
-#if HAVE_CONFIG_H && !defined(VERSION)
-# include "config.h"
-#endif
-
using namespace std;
@@ -70,9 +70,9 @@ struct SModelOptions {
double //discrete_dt,
integration_dt_max,
integration_dt_min,
- integration_dt_max_cap;
- float sxf_start_delay,
- sxf_sample,
+ integration_dt_cap;
+ double sxf_start_delay,
+ sxf_period,
sdf_sigma;
int verbosely;
@@ -91,8 +91,8 @@ struct SModelOptions {
spike_threshold (0.), spike_lapse (3.),
listen_dt(1.),
//discrete_dt(.5),
- integration_dt_max (.5), integration_dt_min (1e-5), integration_dt_max_cap (5.),
- sxf_start_delay (0.), sxf_sample (0.), sdf_sigma (0.),
+ integration_dt_max (.5), integration_dt_min (1e-5), integration_dt_cap (5.),
+ sxf_start_delay (0.), sxf_period (0.), sdf_sigma (0.),
verbosely (1)
{}
};
@@ -110,7 +110,12 @@ class CModel : public cnrun::stilton::C_verprintf {
SModelOptions
options;
- // Unit lookup
+ // Unit list and lookup
+ vector<C_BaseUnit*>
+ list_units() const
+ { return move(vector<C_BaseUnit*> (units.begin(), units.end())); }
+ vector<C_BaseUnit*>
+ list_units( const string& label) const;
C_BaseUnit *unit_by_label( const string&) const;
C_BaseNeuron *neuron_by_label( const string&) const;
C_BaseSynapse *synapse_by_label( const string&) const;
@@ -164,7 +169,7 @@ class CModel : public cnrun::stilton::C_verprintf {
enum class TSynapseCloningOption { yes, no, };
C_BaseSynapse*
- add_synapse_species( const string& type, const string& src_l, const string& tgt_l,
+ add_synapse_species( const string& type, const string& src, const string& tgt,
double g,
TSynapseCloningOption = TSynapseCloningOption::yes,
TIncludeOption = TIncludeOption::is_last);
@@ -211,70 +216,92 @@ class CModel : public cnrun::stilton::C_verprintf {
// high-level functions to manipulate unit behaviour, set params, & connect sources
struct STagGroup {
string pattern;
- bool enable;
- STagGroup( const string& a, bool b = true)
- : pattern (a), enable (b)
+ enum class TInvertOption { yes, no, };
+ TInvertOption invert_option;
+ STagGroup( const string& a,
+ TInvertOption b = STagGroup::TInvertOption::no)
+ : pattern (a), invert_option (b)
{}
};
struct STagGroupListener : STagGroup {
int bits;
- STagGroupListener( const string& a, bool b, int c = 0)
+ STagGroupListener( const string& a,
+ int c = 0,
+ STagGroup::TInvertOption b = STagGroup::TInvertOption::no)
: STagGroup (a, b), bits (c)
{}
};
- int process_listener_tags( const list<STagGroupListener>&);
+ size_t process_listener_tags( const list<STagGroupListener>&);
struct STagGroupSpikelogger : STagGroup {
double period, sigma, from;
- STagGroupSpikelogger( const string& a, bool b,
- double c = 0., double d = 0., double e = 0.) // defaults disable sdf computation
+ STagGroupSpikelogger( const string& a,
+ double c = 0., double d = 0., double e = 0., // defaults disable sdf computation
+ STagGroup::TInvertOption b = STagGroup::TInvertOption::no)
: STagGroup (a, b), period (c), sigma (d), from (e)
{}
};
- int process_spikelogger_tags( const list<STagGroupSpikelogger>&);
- int process_putout_tags( const list<STagGroup>&);
+ size_t process_spikelogger_tags( const list<STagGroupSpikelogger>&);
+ size_t process_putout_tags( const list<STagGroup>&);
struct STagGroupDecimate : STagGroup {
float fraction;
STagGroupDecimate( const string& a, double c)
- : STagGroup (a), fraction (c)
+ : STagGroup (a, TInvertOption::no), fraction (c)
{}
};
- int process_decimate_tags( const list<STagGroupDecimate>&);
+ size_t process_decimate_tags( const list<STagGroupDecimate>&);
struct STagGroupNeuronParmSet : STagGroup {
string parm;
double value;
- STagGroupNeuronParmSet( const string& a, bool b, const string& c, double d) // b == false to revert to stock
- : STagGroup (a, b), parm (c), value (d)
+ STagGroupNeuronParmSet( const string& a,
+ const string& c, double d,
+ STagGroup::TInvertOption b = STagGroup::TInvertOption::no)
+ : STagGroup (a, b),
+ parm (c), value (d)
{}
};
struct STagGroupSynapseParmSet : STagGroupNeuronParmSet {
string target;
- STagGroupSynapseParmSet( const string& a, const string& z, bool b, const string& c, double d)
- : STagGroupNeuronParmSet (a, b, c, d), target (z)
+ STagGroupSynapseParmSet( const string& a,
+ const string& z, const string& c, double d,
+ STagGroup::TInvertOption b = STagGroup::TInvertOption::no)
+ : STagGroupNeuronParmSet (a, c, d, b), target (z)
{}
};
- int process_paramset_static_tags( const list<STagGroupNeuronParmSet>&);
- int process_paramset_static_tags( const list<STagGroupSynapseParmSet>&);
+ size_t process_paramset_static_tags( const list<STagGroupNeuronParmSet>&);
+ size_t process_paramset_static_tags( const list<STagGroupSynapseParmSet>&);
struct STagGroupSource : STagGroup {
string parm;
C_BaseSource *source;
- STagGroupSource( const string& a, bool b, const string& c, C_BaseSource *d) // b == false to revert to stock
+ STagGroupSource( const string& a,
+ const string& c, C_BaseSource *d,
+ STagGroup::TInvertOption b = STagGroup::TInvertOption::no) // b == false to revert to stock
: STagGroup (a, b), parm (c), source (d)
{}
};
- int process_paramset_source_tags( const list<STagGroupSource>&);
+ size_t process_paramset_source_tags( const list<STagGroupSource>&);
C_BaseSource*
source_by_id( const string& id) const
{
- for ( auto& S : sources )
- if ( S->name == id )
+ for ( auto& S : _sources )
+ if ( id == S->name() )
return S;
return nullptr;
}
+ const list<C_BaseSource*>&
+ sources() const
+ { return _sources; }
+ void
+ add_source( C_BaseSource* s)
+ {
+ _sources.push_back( s);
+ }
+ // no (straight) way to delete a source
+
// 5. Running
unsigned advance( double dist, double *cpu_time_p = nullptr) __attribute__ ((hot));
double model_time() const { return V[0]; }
@@ -282,12 +309,18 @@ class CModel : public cnrun::stilton::C_verprintf {
double dt() const { return _integrator->dt; }
double dt_min() const { return _integrator->_dt_min; }
double dt_max() const { return _integrator->_dt_max; }
+ double dt_cap() const { return _integrator->_dt_cap; }
+ void set_dt_min(double v) { _integrator->_dt_min = v; }
+ void set_dt_max(double v) { _integrator->_dt_max = v; }
+ void set_dt_cap(double v) { _integrator->_dt_cap = v; }
- unsigned long cycle() const { return _cycle; }
- const double& model_discrete_time() const { return _discrete_time; }
- const double& discrete_dt() const { return _discrete_dt; }
+ unsigned long cycle() const { return _cycle; }
+ double model_discrete_time() const { return _discrete_time; }
+ double discrete_dt() const { return _discrete_dt; }
// 9. misc
+ gsl_rng *rng() const
+ { return _rng; }
double rng_sample() const
{
return gsl_rng_uniform_pos( _rng);
@@ -356,6 +389,11 @@ class CModel : public cnrun::stilton::C_verprintf {
for ( auto& U : units_with_continuous_sources )
U->apprise_from_sources();
}
+ void make_spikeloggers_sync_history()
+ {
+ for ( auto& N : spikelogging_neurons )
+ N->sync_spikelogging_history();
+ }
static double
model_time( vector<double> &x)
@@ -429,7 +467,7 @@ class CModel : public cnrun::stilton::C_verprintf {
double _discrete_dt;
list<C_BaseSource*>
- sources;
+ _sources;
ofstream
*_dt_logger,
@@ -464,7 +502,7 @@ CIntegrateRK65::fixate()
// various CUnit & CNeuron methods accessing CModel members
// that we want to have inline
-inline const double
+inline double
C_BaseUnit::model_time() const
{
return M->model_time();
@@ -492,7 +530,7 @@ template <class T>
void
C_BaseUnit::attach_source( T *s, TSinkType t, unsigned short idx)
{
- sources.push_back( SSourceInterface<T>( s, t, idx));
+ _sources.push_back( SSourceInterface<T>( s, t, idx));
M->register_unit_with_sources(this);
}
@@ -559,7 +597,8 @@ var_value( size_t v)
}
inline const double&
-C_HostedNeuron::get_var_value( size_t v) const
+C_HostedNeuron::
+get_var_value( size_t v) const
{
return M->V[idx + v];
}
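With the reordered STagGroup* constructors above (value arguments first, TInvertOption last with a default), setting a synapse parameter across a projection might look like the sketch below; the model reference, the "CX.*"/"TC.*" patterns and the gsyn value are assumptions for illustration:

        #include <list>
        #include "model.hh"

        void set_projection_weight( cnrun::CModel& model)
        {
                std::list<cnrun::CModel::STagGroupSynapseParmSet> tags;
                tags.emplace_back( "CX.*", "TC.*", "gsyn", .03);
                model.process_paramset_static_tags( tags);
        }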
diff --git a/upstream/src/libcn/mx-attr.hh b/upstream/src/libcn/mx-attr.hh
index 5be9a34..e91f10e 100644
--- a/upstream/src/libcn/mx-attr.hh
+++ b/upstream/src/libcn/mx-attr.hh
@@ -1,31 +1,34 @@
/*
- * Author: Andrei Zavada <johnhommer at gmail.com>
- * building on original work by Thomas Nowotny <tnowotny at ucsd.edu>
+ * File name: libcn/mx-attr.hh
+ * Project: cnrun
+ * Author: Andrei Zavada <johnhommer at gmail.com>
+ * building on original work by Thomas Nowotny <tnowotny at ucsd.edu>
+ * Initial version: 2009-03-31
*
- * License: GPL-2+
- *
- * Initial version: 2009-03-31
+ * Purpose: Interface class for multiplexing units.
*
+ * License: GPL
*/
-
#ifndef CNRUN_LIBCN_MXATTR_H_
#define CNRUN_LIBCN_MXATTR_H_
-#include <vector>
-
#if HAVE_CONFIG_H && !defined(VERSION)
# include "config.h"
#endif
+#include <vector>
+
using namespace std;
namespace cnrun {
class C_MultiplexingAttributes {
+
protected:
virtual void update_queue() = 0;
vector<double> _kq;
+
public:
double q() const
{
diff --git a/upstream/src/libcn/param-unit-literals.hh b/upstream/src/libcn/param-unit-literals.hh
index 5d73e29..af4c823 100644
--- a/upstream/src/libcn/param-unit-literals.hh
+++ b/upstream/src/libcn/param-unit-literals.hh
@@ -1,19 +1,17 @@
/*
- * Author: Andrei Zavada <johnhommer at gmail.com>
+ * File name: libcn/param-unit-literals.hh
+ * Project: cnrun
+ * Author: Andrei Zavada <johnhommer at gmail.com>
+ * Initial version: 2009-04-11
*
- * License: GPL-2+
- *
- * Initial version: 2009-04-11
+ * Purpose: Fancy unicode symbols for various measurement units.
*
+ * License: GPL
*/
#ifndef CNRUN_LIBCN_PARAMUNITLITERALS_H_
#define CNRUN_LIBCN_PARAMUNITLITERALS_H_
-#if HAVE_CONFIG_H && !defined(VERSION)
-# include "config.h"
-#endif
-
#define __CN_PU_CONDUCTANCE "\316\274S/cm\302\262"
#define __CN_PU_RESISTANCE "M\316\251"
#define __CN_PU_POTENTIAL "mV"
diff --git a/upstream/src/libcn/sources.cc b/upstream/src/libcn/sources.cc
index 374a8c6..4800fad 100644
--- a/upstream/src/libcn/sources.cc
+++ b/upstream/src/libcn/sources.cc
@@ -1,44 +1,57 @@
/*
- * Author: Andrei Zavada <johnhommer at gmail.com>
- * building on original work by Thomas Nowotny <tnowotny at ucsd.edu>
+ * File name: libcn/sources.cc
+ * Project: cnrun
+ * Author: Andrei Zavada <johnhommer at gmail.com>
+ * building on original work by Thomas Nowotny <tnowotny at ucsd.edu>
+ * Initial version: 2010-02-24
*
- * License: GPL-2+
- *
- * Initial version: 2010-02-24
+ * Purpose: External stimulation sources (periodic, tape, noise).
*
+ * License: GPL
*/
+#if HAVE_CONFIG_H && !defined(VERSION)
+# include "config.h"
+#endif
+
#include <cmath>
#include <sys/time.h>
#include <iostream>
#include <fstream>
#include <limits>
+#include <gsl/gsl_randist.h>
+#include "libstilton/string.hh"
#include "sources.hh"
-#if HAVE_CONFIG_H && !defined(VERSION)
-# include "config.h"
-#endif
using namespace std;
-const char* const cnrun::C_BaseSource::types[] = {
- "Null",
- "Tape",
- "Periodic",
- "Function",
- "Noise",
-};
+const char* const
+cnrun::C_BaseSource::
+type_s( TSourceType type)
+{
+ switch ( type ) {
+ case TSourceType::null: return "Null";
+ case TSourceType::tape: return "Tape";
+ case TSourceType::periodic: return "Periodic";
+ case TSourceType::function: return "Function";
+ case TSourceType::noise: return "Noise";
+ }
+ return "??";
+}
+
cnrun::CSourceTape::
-CSourceTape( const string& id, const string& infname, bool inis_looping)
- : C_BaseSource (id, SRC_TAPE), is_looping (inis_looping)
+CSourceTape (const string& name_, const string& fname_, TSourceLoopingOption is_looping_)
+ : C_BaseSource (name_, TSourceType::tape), is_looping (is_looping_),
+ _fname (fname_)
{
- ifstream ins( infname);
+ ifstream ins (stilton::str::tilda2homedir( _fname).c_str());
if ( !ins.good() ) {
- name = "";
- return;
+ throw stilton::str::sasprintf(
+ "Tape source file (\"%s\") not good", fname_.c_str());
}
skipws(ins);
@@ -47,60 +60,55 @@ CSourceTape( const string& id, const string& infname, bool inis_looping)
ins.ignore( numeric_limits<streamsize>::max(), '\n');
double timestamp, datum;
ins >> timestamp >> datum;
- values.push_back( pair<double,double>(timestamp, datum));
+ _values.push_back( pair<double,double>(timestamp, datum));
}
- if ( values.size() == 0 ) {
- fprintf( stderr, "No usable values in \"%s\"\n", infname.c_str());
+ if ( _values.size() == 0 ) {
+ fprintf( stderr, "No usable values in \"%s\"\n", _fname.c_str());
return;
}
- fname = infname;
- I = values.begin();
+ _I = _values.begin();
}
-double
+
+void
cnrun::CSourceTape::
-operator() ( double t)
+dump( FILE *strm) const
{
- // position I such that *I < t < *(I+1)
- while ( next(I) != values.end() && (I+1)->first < t )
- ++I;
-
- if ( next(I) == values.end() && is_looping )
- I = values.begin();
-
- return I->second;
+ fprintf( strm, "%s (%s) %zu values from %s%s\n",
+ name(), type_s(),
+ _values.size(), _fname.c_str(),
+ (is_looping == TSourceLoopingOption::yes) ? " (looping)" : "");
}
-
-
cnrun::CSourcePeriodic::
-CSourcePeriodic( const string& id, const string& infname, bool inis_looping, double inperiod)
- : C_BaseSource (id, SRC_PERIODIC), is_looping (inis_looping)
+CSourcePeriodic (const string& name_, const string& fname_, TSourceLoopingOption is_looping_,
+ double period_)
+ : C_BaseSource (name_, TSourceType::periodic), is_looping (is_looping_),
+ _fname (fname_),
+ _period (period_)
{
- ifstream ins( infname);
+ ifstream ins( stilton::str::tilda2homedir(fname_).c_str());
if ( !ins.good() ) {
- name = "";
- return;
+ throw stilton::str::sasprintf(
+ "Periodic source file (\"%s\") not good", fname_.c_str());
}
skipws(ins);
while ( ins.peek() == '#' || ins.peek() == '\n' )
ins.ignore( numeric_limits<streamsize>::max(), '\n');
- if ( !isfinite(inperiod) || inperiod <= 0. ) {
- ins >> inperiod;
- if ( !isfinite(inperiod) || inperiod <= 0. ) {
- fprintf( stderr, "Bad interval for \"%s\"\n", infname.c_str());
- name = "";
- return;
+ if ( !isfinite(_period) || _period <= 0. ) {
+ ins >> _period;
+ if ( !isfinite(_period) || _period <= 0. ) {
+ throw stilton::str::sasprintf(
+ "Period undefined for source \"%s\"", _fname.c_str());
}
}
- period = inperiod;
while ( true ) {
while ( ins.peek() == '#' || ins.peek() == '\n' )
@@ -109,31 +117,71 @@ CSourcePeriodic( const string& id, const string& infname, bool inis_looping, dou
ins >> datum;
if ( ins.eof() || !ins.good() )
break;
- values.push_back( datum);
+ _values.push_back( datum);
}
- if ( values.size() < 2 ) {
- fprintf( stderr, "Need at least 2 scheduled values in \"%s\"\n", infname.c_str());
- name = "";
- return;
+ if ( _values.size() < 2 ) {
+ throw stilton::str::sasprintf(
+ "Need at least 2 scheduled values in \"%s\"\n", _fname.c_str());
}
+}
+
+
- fname = infname;
+void
+cnrun::CSourcePeriodic::
+dump( FILE *strm) const
+{
+ fprintf( strm, "%s (%s) %zu values at %g from %s%s\n",
+ name(), type_s(),
+ _values.size(), _period, _fname.c_str(),
+ (is_looping == TSourceLoopingOption::yes) ? " (looping)" : "");
}
-const char * const cnrun::distribution_names[] = { "uniform", "gaussian" };
+void
+cnrun::CSourceFunction::
+dump( FILE *strm) const
+{
+ fprintf( strm, "%s (%s) (function)\n",
+ name(), type_s());
+}
+
+
+
+const char* const
+cnrun::CSourceNoise::
+distribution_s( TDistribution type)
+{
+ switch ( type ) {
+ case TDistribution::uniform: return "uniform";
+ case TDistribution::gaussian: return "gaussian";
+ }
+ return "??";
+}
+
+
+cnrun::CSourceNoise::TDistribution
+cnrun::CSourceNoise::
+distribution_by_name( const string& s)
+{
+ for ( auto d : {TDistribution::uniform, TDistribution::gaussian} )
+ if ( s == distribution_s( d) )
+ return d;
+ throw stilton::str::sasprintf( "Invalid distribution name: %s", s.c_str());
+}
+
cnrun::CSourceNoise::
-CSourceNoise( const string& id,
- double in_min, double in_max,
- TDistribution indist_type,
+CSourceNoise (const string& name_,
+ double min_, double max_, double sigma_,
+ TDistribution dist_type_,
int seed)
- : C_BaseSource (id, SRC_NOISE),
- _min (in_min), _max (in_max),
- _sigma (in_max - in_min),
- dist_type (indist_type)
+ : C_BaseSource (name_, TSourceType::noise),
+ _min (min_), _max (max_),
+ _sigma (sigma_),
+ _dist_type (dist_type_)
{
const gsl_rng_type *T;
gsl_rng_env_setup();
@@ -143,16 +191,38 @@ CSourceNoise( const string& id,
gettimeofday( &tp, nullptr);
gsl_rng_default_seed = tp.tv_usec;
}
- rng = gsl_rng_alloc( T);
+ _rng = gsl_rng_alloc( T);
+}
+
+double
+cnrun::CSourceNoise::
+operator() ( double unused)
+{
+ switch ( _dist_type ) {
+ case TDistribution::uniform: return gsl_rng_uniform( _rng) * (_max - _min) + _min;
+ case TDistribution::gaussian: return gsl_ran_gaussian( _rng, _sigma) + (_min + _max)/2;
+ }
+ return 42.;
}
cnrun::CSourceNoise::
-~CSourceNoise()
+~CSourceNoise ()
{
- gsl_rng_free( rng);
+ gsl_rng_free( _rng);
}
+
+void
+cnrun::CSourceNoise::
+dump( FILE *strm) const
+{
+ fprintf( strm, "%s (%s) %s in range %g:%g (sigma = %g)\n",
+ name(), type_s(),
+ distribution_s(_dist_type), _min, _max, _sigma);
+}
+
+
// Local Variables:
// Mode: c++
// indent-tabs-mode: nil
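To show how the reworked source classes are meant to be driven, here is a hedged sketch that builds a gaussian noise source with the constructor defined above, registers it with the model, and wires it to a unit parameter through the source tags; the model reference, the "^PN" pattern and the "Idc" parameter name are illustrative assumptions:

        #include <list>
        #include "model.hh"
        #include "sources.hh"

        void add_noise_drive( cnrun::CModel& model)
        {
                auto noise = new cnrun::CSourceNoise(
                        "noise1", -.5, .5, .2,                        // name, min, max, sigma
                        cnrun::CSourceNoise::TDistribution::gaussian,
                        0);                                           // seed
                model.add_source( noise);  // keeps it findable via source_by_id("noise1")

                std::list<cnrun::CModel::STagGroupSource> tags;
                tags.emplace_back( "^PN", "Idc", noise);
                model.process_paramset_source_tags( tags);
        }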
diff --git a/upstream/src/libcn/sources.hh b/upstream/src/libcn/sources.hh
index 207f44c..88533b8 100644
--- a/upstream/src/libcn/sources.hh
+++ b/upstream/src/libcn/sources.hh
@@ -1,167 +1,190 @@
/*
- * Author: Andrei Zavada <johnhommer at gmail.com>
+ * File name: libcn/sources.hh
+ * Project: cnrun
+ * Author: Andrei Zavada <johnhommer at gmail.com>
+ * building on original work by Thomas Nowotny <tnowotny at ucsd.edu>
+ * Initial version: 2010-02-24
*
- * License: GPL-2+
- *
- * Initial version: 2010-02-24
+ * Purpose: External stimulation sources (periodic, tape, noise).
*
+ * License: GPL
*/
#ifndef CNRUN_LIBCN_SOURCES_H_
#define CNRUN_LIBCN_SOURCES_H_
+#if HAVE_CONFIG_H && !defined(VERSION)
+# include "config.h"
+#endif
+
+#include <cstdio>
#include <string>
#include <vector>
-#include <utility>
+#include <gsl/gsl_rng.h>
+#include "libstilton/lang.hh"
#include "forward-decls.hh"
-#include "gsl/gsl_rng.h"
-#include "gsl/gsl_randist.h"
-
-#if HAVE_CONFIG_H && !defined(VERSION)
-# include "config.h"
-#endif
using namespace std;
namespace cnrun {
+enum class TSourceType { null, tape, periodic, function, noise };
+
class C_BaseSource {
+
+ DELETE_DEFAULT_METHODS (C_BaseSource)
+
public:
- enum TSourceType { SRC_NULL, SRC_TAPE, SRC_PERIODIC, SRC_FUNCTION, SRC_NOISE };
- static const char * const types[];
+ static const char* const type_s( TSourceType);
- string name;
- TSourceType type;
- C_BaseSource( const string& id, TSourceType intype = SRC_NULL)
- : name (id), type (intype)
+ C_BaseSource (const string& name_, TSourceType type_)
+ : _name (name_), _type (type_)
{}
virtual ~C_BaseSource()
{}
+ const char* name() const
+ { return _name.c_str(); }
+ const TSourceType type() const
+ { return _type; }
+ const char* type_s() const
+ { return type_s(_type); }
+
virtual double operator() ( double)
{ return 0.; }
virtual bool is_periodic()
{ return false; }
+
bool operator== ( const C_BaseSource &rv)
- { return name == rv.name; }
+ { return _name == rv._name; }
bool operator== ( const string& rv)
- { return name == name; }
- virtual void dump( FILE *strm = stdout)
- {
- fprintf( strm, "%s (%s)\n", name.c_str(), types[type]);
- }
+ { return _name == rv; }
+
+ virtual void dump( FILE *strm = stdout) const = 0;
+
+ protected:
+ string _name;
+ TSourceType
+ _type;
};
+enum class TSourceLoopingOption { yes, no };
class CSourceTape : public C_BaseSource {
- private:
- CSourceTape();
- public:
- string fname;
- vector<pair<double, double>> values;
- bool is_looping;
- CSourceTape( const string& id, const string& infname, bool is_looping = false);
+ DELETE_DEFAULT_METHODS (CSourceTape)
+
+ public:
+ CSourceTape (const string& name_, const string& fname_,
+ TSourceLoopingOption = TSourceLoopingOption::no);
- double operator() ( double at);
+ TSourceLoopingOption is_looping;
- void dump( FILE *strm = stdout)
+ double operator() ( double at)
{
- fprintf( strm, "%s (%s) %zu values from %s%s\n",
- name.c_str(), types[type],
- values.size(), fname.c_str(), is_looping ? "" : " (looping)");
+ while ( next(_I) != _values.end() && next(_I)->first < at )
+ ++_I;
+
+ if ( next(_I) == _values.end() && is_looping == TSourceLoopingOption::yes )
+ _I = _values.begin();
+
+ return _I->second;
}
+
+ void dump( FILE *strm = stdout) const;
+
private:
- vector<pair<double, double>>::iterator I;
+ string _fname;
+ vector<pair<double, double>> _values;
+ vector<pair<double, double>>::iterator _I;
};
+
class CSourcePeriodic : public C_BaseSource {
- private:
- CSourcePeriodic();
+
+ DELETE_DEFAULT_METHODS (CSourcePeriodic)
+
public:
- string fname;
- vector<double> values;
- double period;
- bool is_looping;
+ CSourcePeriodic (const string& name_, const string& fname_,
+ TSourceLoopingOption,
+ double period);
- CSourcePeriodic( const string& id, const string& fname, bool is_looping = false, double period = 0.);
+ TSourceLoopingOption is_looping;
double operator() ( double at)
{
- size_t i_abs = (size_t)(at / period),
- i_eff = is_looping
- ? i_abs % values.size()
- : min (i_abs, values.size() - 1);
- return values[i_eff];
+ size_t i_abs = (size_t)(at / _period),
+ i_eff = (is_looping == TSourceLoopingOption::yes)
+ ? i_abs % _values.size()
+ : min (i_abs, _values.size() - 1);
+ return _values[i_eff];
}
- void dump( FILE *strm = stdout)
- {
- fprintf( strm, "%s (%s) %zu values at %g from %s%s\n",
- name.c_str(), types[type],
- values.size(), period, fname.c_str(), is_looping ? "" : " (looping)");
- }
+ void dump( FILE *strm = stdout) const;
bool is_periodic()
{ return true; }
+ double period() const
+ { return _period; }
+
+ private:
+ string _fname;
+ vector<double> _values;
+ double _period;
};
+
class CSourceFunction : public C_BaseSource {
- private:
- CSourceFunction();
- public:
- double (*function)( double at);
+// not useful in Lua
+
+ DELETE_DEFAULT_METHODS (CSourceFunction)
- CSourceFunction( const string& id, double (*f)(double))
- : C_BaseSource (id, SRC_FUNCTION), function (f)
+ public:
+ CSourceFunction (const string& name_, double (*function_)(double))
+ : C_BaseSource (name_, TSourceType::function), _function (function_)
{}
double operator() ( double at)
{
- return function( at);
+ return _function( at);
}
+
+ void dump( FILE *strm = stdout) const;
+
+ private:
+ double (*_function)( double at);
};
-extern const char * const distribution_names[];
class CSourceNoise : public C_BaseSource {
- private:
- CSourceNoise();
+
+ DELETE_DEFAULT_METHODS (CSourceNoise)
+
public:
- double _min, _max, _sigma;
- enum TDistribution {
- SOURCE_RANDDIST_UNIFORM,
- SOURCE_RANDDIST_GAUSSIAN,
- };
- TDistribution dist_type;
- gsl_rng *rng;
-
- CSourceNoise( const string& id, double in_min = 0., double in_max = 1.,
- TDistribution = SOURCE_RANDDIST_UNIFORM,
+ enum class TDistribution { uniform, gaussian, };
+ static const char * const distribution_s( TDistribution);
+ static TDistribution distribution_by_name( const string&);
+
+ CSourceNoise (const string& name_, double min_ = 0., double max_ = 1.,
+ double sigma_ = 1.,
+ TDistribution = TDistribution::uniform,
int seed = 0);
- ~CSourceNoise();
+ ~CSourceNoise ();
- double operator() ( double unused) const
- {
- switch ( dist_type ) {
- case SOURCE_RANDDIST_UNIFORM: return gsl_rng_uniform( rng) * (_max - _min) + _min;
- case SOURCE_RANDDIST_GAUSSIAN: return gsl_ran_gaussian( rng, _sigma) + (_max - _min)/2;
- default: return 42.;
- }
- }
+ double operator() ( double unused);
- void dump( FILE *strm = stdout)
- {
- fprintf( strm, "%s (%s) %s in range %g:%g (sigma = %g)\n",
- name.c_str(), types[type],
- distribution_names[dist_type], _min, _max, _sigma);
- }
+ void dump( FILE *strm = stdout) const;
+
+ private:
+ double _min, _max, _sigma;
+ TDistribution _dist_type;
+ gsl_rng *_rng;
};
}
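
The reworked classes above hide their state behind C_BaseSource's small interface (name(), type_s(), operator()(), dump()). A short usage sketch under these declarations; the source names and the sine stimulus are illustrative, CSourceTape/CSourcePeriodic are left out because their constructors read from files, and the program must be linked against GSL for the noise source:

    #include <cmath>
    #include <cstdio>
    #include <memory>
    #include <vector>
    #include "libcn/sources.hh"

    using namespace cnrun;

    static double sine( double at)
    { return sin( at * 0.1); }

    int main()
    {
            vector<unique_ptr<C_BaseSource>> sources;
            sources.emplace_back( new CSourceFunction( "stim1", sine));
            sources.emplace_back(
                    new CSourceNoise( "noise1", -1., 1., /*sigma_*/ .5,
                                      CSourceNoise::TDistribution::uniform));

            for ( auto& S : sources ) {
                    S->dump();                       // stdout by default
                    printf( "%s(10.) = %g\n", S->name(), (*S)( 10.));
            }
            return 0;
    }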
diff --git a/upstream/src/libcn/standalone-attr.hh b/upstream/src/libcn/standalone-attr.hh
index 832ae7a..a86eefd 100644
--- a/upstream/src/libcn/standalone-attr.hh
+++ b/upstream/src/libcn/standalone-attr.hh
@@ -1,50 +1,56 @@
/*
- * Author: Andrei Zavada <johnhommer at gmail.com>
- * building on original work by Thomas Nowotny <tnowotny at ucsd.edu>
+ * File name: libcn/standalone-attr.hh
+ * Project: cnrun
+ * Author: Andrei Zavada <johnhommer at gmail.com>
+ * building on original work by Thomas Nowotny <tnowotny at ucsd.edu>
+ * Initial version: 2009-03-31
*
- * License: GPL-2+
- *
- * Initial version: 2009-03-31
+ * Purpose: Interface class for standalone units.
*
+ * License: GPL
*/
-
-
-#ifndef LIBCN_STANDALONE_ATTR_H
-#define LIBCN_STANDALONE_ATTR_H
-
-#include <vector>
+#ifndef CNRUN_LIBCN_STANDALONEATTR_H_
+#define CNRUN_LIBCN_STANDALONEATTR_H_
#if HAVE_CONFIG_H && !defined(VERSION)
# include "config.h"
#endif
+#include <vector>
+
using namespace std;
namespace cnrun {
class C_StandaloneAttributes {
+ friend class CModel;
protected:
- C_StandaloneAttributes()
- {}
- C_StandaloneAttributes( size_t nvars)
- {
- V.resize( nvars), V_next.resize( nvars);
- }
+ C_StandaloneAttributes (size_t nvars)
+ {
+ V.resize( nvars);
+ V_next.resize( nvars);
+ }
- vector<double> V, V_next;
+ vector<double>
+ V,
+ V_next;
- friend class CModel;
private:
- virtual void preadvance()
- {}
- void fixate()
- { V = V_next; }
+ virtual void preadvance()
+ {}
+ void fixate()
+ { V = V_next; }
};
}
#endif
-// EOF
+// Local Variables:
+// Mode: c++
+// indent-tabs-mode: nil
+// tab-width: 8
+// c-basic-offset: 8
+// End:
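
C_StandaloneAttributes keeps the usual double buffer: a unit's preadvance() computes V_next from the current V, and CModel (the only class allowed to call fixate()) commits it once every unit has advanced, so all units in a cycle read a consistent snapshot. A self-contained toy analogue of that discipline, not CModel's actual cycle and not the real class, whose constructor and fixate() are inaccessible outside the library:

    #include <cstdio>
    #include <vector>

    struct toy_unit {
            std::vector<double> V, V_next;

            toy_unit (size_t nvars)
                  : V (nvars, 0.), V_next (nvars, 0.)
                  {}
            void preadvance( double dt)          // e.g. simple exponential decay
            {
                    for ( size_t i = 0; i < V.size(); ++i )
                            V_next[i] = V[i] - 0.1 * V[i] * dt;
            }
            void fixate()                        // commit, as C_StandaloneAttributes does
            { V = V_next; }
    };

    int main()
    {
            toy_unit u (1);
            u.V[0] = 1.;
            for ( int step = 0; step < 3; ++step ) {
                    u.preadvance( 0.1);          // all units would preadvance first...
                    u.fixate();                  // ...then all would fixate
                    printf( "V = %g\n", u.V[0]);
            }
            return 0;
    }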
diff --git a/upstream/src/libcn/standalone-neurons.cc b/upstream/src/libcn/standalone-neurons.cc
index 2e7099c..c84f2e8 100644
--- a/upstream/src/libcn/standalone-neurons.cc
+++ b/upstream/src/libcn/standalone-neurons.cc
@@ -1,13 +1,19 @@
/*
- * Author: Andrei Zavada <johnhommer at gmail.com>
- * building on original work by Thomas Nowotny <tnowotny at ucsd.edu>
+ * File name: libcn/standalone-neurons.cc
+ * Project: cnrun
+ * Author: Andrei Zavada <johnhommer at gmail.com>
+ * building on original work by Thomas Nowotny <tnowotny at ucsd.edu>
+ * Initial version: 2009-04-08
*
- * License: GPL-2+
- *
- * Initial version: 2009-04-08
+ * Purpose: standalone neurons (those not having state vars
+ * on model's integration vector)
*
+ * License: GPL
*/
+#if HAVE_CONFIG_H && !defined(VERSION)
+# include "config.h"
+#endif
#include <iostream>
@@ -16,14 +22,10 @@
#include "types.hh"
#include "model.hh"
-#if HAVE_CONFIG_H && !defined(VERSION)
-# include "config.h"
-#endif
-
cnrun::C_StandaloneNeuron::
-C_StandaloneNeuron( TUnitType intype, const string& inlabel,
+C_StandaloneNeuron (TUnitType intype, const string& inlabel,
double x, double y, double z,
CModel *inM, int s_mask)
: C_BaseNeuron( intype, inlabel, x, y, z, inM, s_mask),
@@ -333,7 +335,7 @@ const char* const cnrun::__CN_VarSyms_NeuronMap[] = {
cnrun::CNeuronMap::
-CNeuronMap( const string& inlabel, double x, double y, double z, CModel *inM, int s_mask)
+CNeuronMap (const string& inlabel, double x, double y, double z, CModel *inM, int s_mask)
: C_StandaloneConductanceBasedNeuron( NT_MAP, inlabel, x, y, z, inM, s_mask)
{
if ( inM ) {
diff --git a/upstream/src/libcn/standalone-neurons.hh b/upstream/src/libcn/standalone-neurons.hh
index a0a8715..ff4cbd2 100644
--- a/upstream/src/libcn/standalone-neurons.hh
+++ b/upstream/src/libcn/standalone-neurons.hh
@@ -1,27 +1,29 @@
/*
- * Author: Andrei Zavada <johnhommer at gmail.com>
- * building on original work by Thomas Nowotny <tnowotny at ucsd.edu>
+ * File name: libcn/standalone-neurons.hh
+ * Project: cnrun
+ * Author: Andrei Zavada <johnhommer at gmail.com>
+ * building on original work by Thomas Nowotny <tnowotny at ucsd.edu>
+ * Initial version: 2008-08-02
*
- * License: GPL-2+
+ * Purpose: standalone neurons (those not having state vars
+ * on model's integration vector)
*
- * Initial version: 2008-08-02
+ * License: GPL
*/
-
-
#ifndef CNRUN_LIBCN_STANDALONENEURONS_H_
#define CNRUN_LIBCN_STANDALONENEURONS_H_
+#if HAVE_CONFIG_H && !defined(VERSION)
+# include "config.h"
+#endif
+
#include "libstilton/lang.hh"
#include "forward-decls.hh"
#include "base-neuron.hh"
#include "standalone-attr.hh"
#include "mx-attr.hh"
-#if HAVE_CONFIG_H && !defined(VERSION)
-# include "config.h"
-#endif
-
namespace cnrun {
@@ -31,12 +33,12 @@ class C_StandaloneNeuron
DELETE_DEFAULT_METHODS (C_StandaloneNeuron)
protected:
- C_StandaloneNeuron( TUnitType intype, const string& inlabel,
+ C_StandaloneNeuron (TUnitType intype, const string& inlabel,
double x, double y, double z,
CModel*, int s_mask);
public:
- ~C_StandaloneNeuron();
+ ~C_StandaloneNeuron ();
double &var_value( size_t v) { return V[v]; }
const double &get_var_value( size_t v) const { return V[v]; }
@@ -57,7 +59,7 @@ class C_StandaloneConductanceBasedNeuron
DELETE_DEFAULT_METHODS (C_StandaloneConductanceBasedNeuron)
protected:
- C_StandaloneConductanceBasedNeuron( TUnitType intype, const string& inlabel,
+ C_StandaloneConductanceBasedNeuron (TUnitType intype, const string& inlabel,
double inx, double iny, double inz,
CModel *inM, int s_mask)
: C_StandaloneNeuron (intype, inlabel, inx, iny, inz, inM, s_mask)
@@ -77,7 +79,7 @@ class C_StandaloneRateBasedNeuron
DELETE_DEFAULT_METHODS (C_StandaloneRateBasedNeuron)
protected:
- C_StandaloneRateBasedNeuron( TUnitType intype, const string& inlabel,
+ C_StandaloneRateBasedNeuron (TUnitType intype, const string& inlabel,
double inx, double iny, double inz,
CModel *inM, int s_mask)
: C_StandaloneNeuron (intype, inlabel, inx, iny, inz, inM, s_mask)
@@ -97,7 +99,7 @@ class C_StandaloneRateBasedNeuron
class CNeuronHH_r
: public C_StandaloneRateBasedNeuron {
- DELETE_DEFAULT_METHODS(CNeuronHH_r)
+ DELETE_DEFAULT_METHODS (CNeuronHH_r)
public:
CNeuronHH_r( const string& inlabel,
@@ -131,11 +133,12 @@ class COscillatorPoisson
COscillatorPoisson( const string& inlabel,
double x, double y, double z,
CModel *inM, int s_mask = 0)
- : C_StandaloneConductanceBasedNeuron( NT_POISSON, inlabel, x, y, z, inM, s_mask)
+ : C_StandaloneConductanceBasedNeuron (NT_POISSON, inlabel, x, y, z, inM, s_mask)
{
// need _spikelogger_agent's fields even when no spikelogging is done
- _spikelogger_agent = new SSpikeloggerService( static_cast<C_BaseNeuron*>(this),
- 0 | CN_KL_PERSIST | CN_KL_IDLE);
+ _spikelogger_agent = new SSpikeloggerService(
+ static_cast<C_BaseNeuron*>(this),
+ 0 | CN_KL_PERSIST | CN_KL_IDLE);
}
enum {
@@ -162,14 +165,15 @@ class COscillatorDotPoisson
DELETE_DEFAULT_METHODS (COscillatorDotPoisson)
public:
- COscillatorDotPoisson( const string& inlabel,
+ COscillatorDotPoisson (const string& inlabel,
double x, double y, double z,
CModel *inM, int s_mask = 0)
: C_StandaloneConductanceBasedNeuron( NT_DOTPOISSON, inlabel, x, y, z, inM, s_mask)
{
// need _spikelogger_agent's fields even when no spikelogging is done
- _spikelogger_agent = new SSpikeloggerService( static_cast<C_BaseNeuron*>(this),
- 0 | CN_KL_PERSIST | CN_KL_IDLE);
+ _spikelogger_agent = new SSpikeloggerService(
+ static_cast<C_BaseNeuron*>(this),
+ 0 | CN_KL_PERSIST | CN_KL_IDLE);
}
enum {
@@ -195,10 +199,10 @@ class CNeuronDotPulse
DELETE_DEFAULT_METHODS (CNeuronDotPulse)
public:
- CNeuronDotPulse( const string& inlabel,
+ CNeuronDotPulse (const string& inlabel,
double x, double y, double z,
CModel *inM, int s_mask = 0)
- : C_StandaloneConductanceBasedNeuron( NT_DOTPULSE, inlabel, x, y, z, inM, s_mask)
+ : C_StandaloneConductanceBasedNeuron (NT_DOTPULSE, inlabel, x, y, z, inM, s_mask)
{}
enum { _f_, _Vrst_, _Vfir_ };
@@ -227,7 +231,7 @@ class CNeuronMap
public:
static const constexpr double fixed_dt = 0.1;
- CNeuronMap( const string& inlabel, double x, double y, double z,
+ CNeuronMap (const string& inlabel, double x, double y, double z,
CModel*, int s_mask = 0);
enum {
diff --git a/upstream/src/libcn/standalone-synapses.cc b/upstream/src/libcn/standalone-synapses.cc
index 8b28d7d..e8ccb6b 100644
--- a/upstream/src/libcn/standalone-synapses.cc
+++ b/upstream/src/libcn/standalone-synapses.cc
@@ -1,30 +1,29 @@
/*
- * Author: Andrei Zavada <johnhommer at gmail.com>
- * building on original work by Thomas Nowotny <tnowotny at ucsd.edu>
+ * File name: libcn/standalone-synapses.cc
+ * Project: cnrun
+ * Author: Andrei Zavada <johnhommer at gmail.com>
+ * building on original work by Thomas Nowotny <tnowotny at ucsd.edu>
+ * Initial version: 2009-04-08
*
- * License: GPL-2+
- *
- * Initial version: 2009-04-08
+ * Purpose: standalone synapses.
*
+ * License: GPL
*/
+#if HAVE_CONFIG_H && !defined(VERSION)
+# include "config.h"
+#endif
#include <iostream>
#include "param-unit-literals.hh"
-
#include "standalone-synapses.hh"
#include "types.hh"
#include "model.hh"
-#if HAVE_CONFIG_H && !defined(VERSION)
-# include "config.h"
-#endif
-
-
cnrun::C_StandaloneSynapse::
-C_StandaloneSynapse( TUnitType intype,
+C_StandaloneSynapse (TUnitType intype,
C_BaseNeuron* insource, C_BaseNeuron* intarget,
double ing, CModel* inM, int s_mask)
: C_BaseSynapse (intype, insource, intarget, ing, inM, s_mask),
@@ -63,7 +62,7 @@ const char* const cnrun::__CN_ParamSyms_SynapseMap[] = {
cnrun::CSynapseMap::
-CSynapseMap( C_BaseNeuron *insource, C_BaseNeuron *intarget,
+CSynapseMap (C_BaseNeuron *insource, C_BaseNeuron *intarget,
double ing, CModel *inM, int s_mask, TUnitType alt_type)
: C_StandaloneSynapse( alt_type, insource, intarget, ing, inM, s_mask),
_source_was_spiking (false)
diff --git a/upstream/src/libcn/standalone-synapses.hh b/upstream/src/libcn/standalone-synapses.hh
index 48d03c3..391ff6f 100644
--- a/upstream/src/libcn/standalone-synapses.hh
+++ b/upstream/src/libcn/standalone-synapses.hh
@@ -1,17 +1,22 @@
/*
- * Author: Andrei Zavada <johnhommer at gmail.com>
- * building on original work by Thomas Nowotny <tnowotny at ucsd.edu>
+ * File name: libcn/standalone-synapses.hh
+ * Project: cnrun
+ * Author: Andrei Zavada <johnhommer at gmail.com>
+ * building on original work by Thomas Nowotny <tnowotny at ucsd.edu>
+ * Initial version: 2008-08-02
*
- * License: GPL-2+
- *
- * Initial version: 2008-08-02
+ * Purpose: standalone synapses (those not having state vars
+ * on model's integration vector)
*
+ * License: GPL
*/
+#ifndef CNRUN_LIBCN_STANDALONESYNAPSES_H_
+#define CNRUN_LIBCN_STANDALONESYNAPSES_H_
-
-#ifndef CNRUN_LIBCN_STANDALONE_SYNAPSES_H_
-#define CNRUN_LIBCN_STANDALONE_SYNAPSES_H_
+#if HAVE_CONFIG_H && !defined(VERSION)
+# include "config.h"
+#endif
#include <iostream>
@@ -20,10 +25,6 @@
#include "standalone-attr.hh"
#include "mx-attr.hh"
-#if HAVE_CONFIG_H && !defined(VERSION)
-# include "config.h"
-#endif
-
namespace cnrun {
@@ -32,14 +33,14 @@ class CModel;
class C_StandaloneSynapse
: public C_BaseSynapse, public C_StandaloneAttributes {
- private:
- C_StandaloneSynapse();
+ DELETE_DEFAULT_METHODS (C_StandaloneSynapse)
+
protected:
- C_StandaloneSynapse( TUnitType, C_BaseNeuron *insource, C_BaseNeuron *intarget,
+ C_StandaloneSynapse (TUnitType, C_BaseNeuron *insource, C_BaseNeuron *intarget,
double ing, CModel*, int s_mask = 0);
public:
- ~C_StandaloneSynapse();
+ ~C_StandaloneSynapse ();
double &var_value( size_t v) { return V[v]; }
const double &get_var_value( size_t v) const { return V[v]; }
@@ -62,13 +63,12 @@ class C_StandaloneSynapse
class CSynapseMap
: public C_StandaloneSynapse {
- private:
- CSynapseMap();
+ DELETE_DEFAULT_METHODS (CSynapseMap)
public:
static constexpr double fixed_dt = 0.1;
- CSynapseMap( C_BaseNeuron *insource, C_BaseNeuron *intarget,
+ CSynapseMap (C_BaseNeuron *insource, C_BaseNeuron *intarget,
double ing, CModel*, int s_mask = 0, TUnitType alt_type = YT_MAP);
void preadvance(); // defined inline in model.h
@@ -96,6 +96,8 @@ class CSynapseMap
class CSynapseMxMap
: public CSynapseMap, public C_MultiplexingAttributes {
+ DELETE_DEFAULT_METHODS (CSynapseMxMap)
+
public:
static constexpr double fixed_dt = 0.1;
diff --git a/upstream/src/libcn/types.cc b/upstream/src/libcn/types.cc
index 91eb67e..b804873 100644
--- a/upstream/src/libcn/types.cc
+++ b/upstream/src/libcn/types.cc
@@ -1,28 +1,26 @@
/*
- * Author: Andrei Zavada <johnhommer at gmail.com>
- * building on original work by Thomas Nowotny
+ * File name: libcn/types.cc
+ * Project: cnrun
+ * Author: Andrei Zavada <johnhommer at gmail.com>
+ * building on original work by Thomas Nowotny <tnowotny at ucsd.edu>
+ * Initial version: 2008-09-23
*
- * License: GPL-2+
+ * Purpose: CN global unit descriptors
*
- * Initial version: 2008-09-23
- *
- * CN global unit descriptors
+ * License: GPL
*/
-
+#if HAVE_CONFIG_H && !defined(VERSION)
+# include "config.h"
+#endif
#include <cstdio>
#include <cstring>
#include <iostream>
#include "libstilton/string.hh"
-
#include "types.hh"
-#if HAVE_CONFIG_H && !defined(VERSION)
-# include "config.h"
-#endif
-
using namespace std;
diff --git a/upstream/src/libcn/types.hh b/upstream/src/libcn/types.hh
index 1d0300e..6518868 100644
--- a/upstream/src/libcn/types.hh
+++ b/upstream/src/libcn/types.hh
@@ -1,15 +1,15 @@
/*
- * Author: Andrei Zavada <johnhommer at gmail.com>
- * building on original work by Thomas Nowotny <tnowotny at ucsd.edu>
+ * File name: libcn/types.hh
+ * Project: cnrun
+ * Author: Andrei Zavada <johnhommer at gmail.com>
+ * building on original work by Thomas Nowotny <tnowotny at ucsd.edu>
+ * Initial version: 2008-08-02
*
- * License: GPL-2+
+ * Purpose: Enumerated type for unit ids, and a structure describing a unit type.
*
- * Initial version: 2008-08-02
- *
- * Enumerated type for unit ids, and a structure describing a unit type
+ * License: GPL
*/
-
//#define CN_WANT_MORE_NEURONS
#ifndef CNRUN_LIBCN_TYPES_H_
@@ -19,8 +19,8 @@
# include "config.h"
#endif
-
using namespace std;
+
namespace cnrun {
enum TUnitType {
diff --git a/upstream/src/libstilton/containers.hh b/upstream/src/libstilton/containers.hh
index 95c108f..df25c56 100644
--- a/upstream/src/libstilton/containers.hh
+++ b/upstream/src/libstilton/containers.hh
@@ -12,16 +12,16 @@
#ifndef CNRUN_LIBSTILTON_CONTAINERS_H_
#define CNRUN_LIBSTILTON_CONTAINERS_H_
+#if HAVE_CONFIG_H && !defined(VERSION)
+# include "config.h"
+#endif
+
#include <list>
#include <forward_list>
#include <vector>
#include <map>
#include <algorithm>
-#if HAVE_CONFIG_H && !defined(VERSION)
-# include "config.h"
-#endif
-
using namespace std;
namespace cnrun {
diff --git a/upstream/src/libstilton/exprparser.cc b/upstream/src/libstilton/exprparser.cc
index e694e4a..f0328bc 100644
--- a/upstream/src/libstilton/exprparser.cc
+++ b/upstream/src/libstilton/exprparser.cc
@@ -1,13 +1,17 @@
/*
- * Author: Andrei Zavada <johnhommer at gmail.com>
+ * File name: libstilton/exprparser.cc
+ * Project: cnrun
+ * Author: Andrei Zavada <johnhommer at gmail.com>
+ * Initial version: 2008-12-02
*
- * License: GPL-2+
+ * Purpose: Expression parser
*
- * Initial version: 2008-12-02
- *
- * Expression parser
+ * License: GPL
*/
+#if HAVE_CONFIG_H && !defined(VERSION)
+# include "config.h"
+#endif
#include <cstdlib>
#include <cstdio>
@@ -16,10 +20,6 @@
#include "exprparser.hh"
-#if HAVE_CONFIG_H && !defined(VERSION)
-# include "config.h"
-#endif
-
using namespace std;
diff --git a/upstream/src/libstilton/exprparser.hh b/upstream/src/libstilton/exprparser.hh
index ea2de3c..f5e3630 100644
--- a/upstream/src/libstilton/exprparser.hh
+++ b/upstream/src/libstilton/exprparser.hh
@@ -1,25 +1,26 @@
/*
- * Author: Andrei Zavada <johnhommer at gmail.com>
- *
- * License: GPL-2+
- *
+ * File name: libstilton/exprparser.hh
+ * Project: cnrun
+ * Author: Andrei Zavada <johnhommer at gmail.com>
* Initial version: 2008-12-02
*
- * An expression parser
+ * Purpose: Expression parser
+ *
+ * License: GPL
*/
#ifndef CNRUN_LIBSTILTON_EXPRPARSER_H_
#define CNRUN_LIBSTILTON_EXPRPARSER_H_
+#if HAVE_CONFIG_H && !defined(VERSION)
+# include "config.h"
+#endif
+
#include <cmath>
#include <cstring>
#include <string>
#include <list>
-#if HAVE_CONFIG_H && !defined(VERSION)
-# include "config.h"
-#endif
-
using namespace std;
namespace cnrun {
diff --git a/upstream/src/libstilton/libcommon.cc b/upstream/src/libstilton/libcommon.cc
index 3a7a64b..a7f7073 100644
--- a/upstream/src/libstilton/libcommon.cc
+++ b/upstream/src/libstilton/libcommon.cc
@@ -9,32 +9,31 @@
* License: GPL
*/
+#if HAVE_CONFIG_H && !defined(VERSION)
+# include "config.h"
+#endif
#include <cmath>
#include <cstring>
#include <string>
#include <list>
-#include <stdarg.h>
+#include <cstdarg>
+#include <cerrno>
#include <unistd.h>
-#include <errno.h>
#include "string.hh"
#include "alg.hh"
#include "misc.hh"
-#if HAVE_CONFIG_H && !defined(VERSION)
-# include "config.h"
-#endif
-
using namespace std;
using namespace cnrun::stilton;
void
C_verprintf::
-vp( int level, const char* fmt, ...)
+vp( int level, const char* fmt, ...) const
{
if ( level > verbose_threshold() ) {
va_list ap;
@@ -46,7 +45,7 @@ vp( int level, const char* fmt, ...)
void
C_verprintf::
-vp( int level, FILE* f, const char* fmt, ...)
+vp( int level, FILE* f, const char* fmt, ...) const
{
if ( level > verbose_threshold() ) {
va_list ap;
@@ -321,7 +320,6 @@ double_dot_aligned_s( double val, int int_width, int frac_width)
return {buf};
}
-
// Local Variables:
// Mode: c++
// indent-tabs-mode: nil
diff --git a/upstream/src/libstilton/misc.hh b/upstream/src/libstilton/misc.hh
index 6e3077e..bd74ac4 100644
--- a/upstream/src/libstilton/misc.hh
+++ b/upstream/src/libstilton/misc.hh
@@ -12,12 +12,12 @@
#ifndef CNRUN_LIBSTILTON_MISC_H_
#define CNRUN_LIBSTILTON_MISC_H_
-#include <unistd.h>
-
#if HAVE_CONFIG_H && !defined(VERSION)
# include "config.h"
#endif
+#include <unistd.h>
+
using namespace std;
namespace cnrun {
@@ -25,8 +25,8 @@ namespace stilton {
struct C_verprintf {
virtual int verbose_threshold() const = 0;
- void vp( int, const char* fmt, ...) __attribute__ ((format (printf, 3, 4)));
- void vp( int, FILE*, const char* fmt, ...) __attribute__ ((format (printf, 4, 5)));
+ void vp( int, const char* fmt, ...) const __attribute__ ((format (printf, 3, 4)));
+ void vp( int, FILE*, const char* fmt, ...) const __attribute__ ((format (printf, 4, 5)));
};
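
With vp() now const-qualified, classes can emit verbose diagnostics from their own const methods. A minimal sketch of a class satisfying the C_verprintf contract, assuming the header installs as libstilton/misc.hh; the class name, threshold, and message are illustrative:

    #include <cstdio>
    #include "libstilton/misc.hh"

    using namespace cnrun::stilton;

    class CQuietRunner : public C_verprintf {
        public:
            int verbose_threshold() const override
            { return 3; }                        // illustrative threshold

            void report() const                  // legal now that vp() is const
            { vp( 5, stderr, "step %d done\n", 42); }
    };

    int main()
    {
            CQuietRunner r;
            r.report();
            return 0;
    }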
diff --git a/upstream/src/libstilton/string.hh b/upstream/src/libstilton/string.hh
index 52eeb8a..82f3171 100644
--- a/upstream/src/libstilton/string.hh
+++ b/upstream/src/libstilton/string.hh
@@ -9,8 +9,12 @@
* License: GPL
*/
-#ifndef _CNRUN_LIBSTILTON_STRING_H
-#define _CNRUN_LIBSTILTON_STRING_H
+#ifndef CNRUN_LIBSTILTON_STRING_H_
+#define CNRUN_LIBSTILTON_STRING_H_
+
+#if HAVE_CONFIG_H && !defined(VERSION)
+# include "config.h"
+#endif
#include <cstdarg>
#include <cstring>
@@ -18,10 +22,6 @@
#include <list>
#include <sstream>
-#if HAVE_CONFIG_H && !defined(VERSION)
-# include "config.h"
-#endif
-
using namespace std;
namespace cnrun {
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-med/cnrun.git