[med-svn] [Git][med-team/bali-phy][upstream] New upstream version 3.1.1+dfsg
Benjamin Redelings
gitlab at salsa.debian.org
Sat May 5 15:01:12 BST 2018
Benjamin Redelings pushed to branch upstream at Debian Med / bali-phy
Commits:
6d3ce2e5 by Benjamin Redelings at 2018-05-05T08:01:56-04:00
New upstream version 3.1.1+dfsg
- - - - -
8 changed files:
- functions/gtr.json
- functions/hky85.json
- functions/tn93.json
- meson.build
- src/models/rules.cc
- src/models/setup.H
- src/models/setup.cc
- src/startup/A-T-model.cc
Changes:
=====================================
functions/gtr.json
=====================================
--- a/functions/gtr.json
+++ b/functions/gtr.json
@@ -1,6 +1,7 @@
{
"name": "gtr",
"title":"The GTR exchangeability matrix",
+ "deprecated-synonyms": ["GTR"],
"result_type": "RevCTMC[a]",
"call": "SModel.gtr'[sym,pi,a]",
"citation":"Tavare, 1986",
=====================================
functions/hky85.json
=====================================
--- a/functions/hky85.json
+++ b/functions/hky85.json
@@ -1,6 +1,7 @@
{
"name": "hky85",
"title": "The Hasegawa-Kishino-Yano (1985) nucleotide rate matrix",
+ "deprecated-synonyms": ["HKY","hky","HKY85"],
"result_type": "RevCTMC[a]",
"constraints": ["Nucleotides[a]"],
"citation":{"type": "article",
=====================================
functions/tn93.json
=====================================
--- a/functions/tn93.json
+++ b/functions/tn93.json
@@ -1,6 +1,7 @@
{
"name": "tn93",
"title": "The Tamura-Nei (1993) nucleotide rate matrix",
+ "deprecated-synonyms": ["TN","TrN","tn","TN93","trn"],
"result_type": "RevCTMC[a]",
"citation":{"type": "article",
"title": "Estimation of the number of nucleotide substitutions in the control region of mitochondrial DNA in humans and chimpanzees.",
=====================================
meson.build
=====================================
--- a/meson.build
+++ b/meson.build
@@ -1,5 +1,5 @@
project('bali-phy', ['cpp','c'],
- version: '3.1',
+ version: '3.1.1',
default_options : [
'buildtype=release',
'cpp_std=c++14'
@@ -130,14 +130,23 @@ subdir('src')
subdir('doc')
+# Run internal tests
+run_tests = find_program(join_paths(meson.source_root(),'tests/run-tests.py'))
+
test('bali-phy testsuite',
- find_program('tests/run-tests.py'),
+ run_tests,
timeout: 600,
workdir: join_paths(meson.source_root(),'tests'),
args:[baliphy.full_path(), packagepath])
-test('testiphy (likelihood testsuite)',
- find_program('testiphy/testiphy'),
- timeout: 600,
- workdir: join_paths(meson.source_root(),'testiphy'),
- args:[baliphy.full_path(), packagepath])
+# Run external likelihood testsuite
+
+testiphy = find_program(join_paths(meson.source_root(),'testiphy/testiphy'), required: false)
+
+if testiphy.found()
+ test('testiphy (likelihood testsuite)',
+ testiphy,
+ timeout: 600,
+ workdir: join_paths(meson.source_root(),'testiphy'),
+ args:[baliphy.full_path(), packagepath])
+endif
=====================================
src/models/rules.cc
=====================================
--- a/src/models/rules.cc
+++ b/src/models/rules.cc
@@ -190,7 +190,7 @@ optional<Rule> Rules::get_rule_for_func(const string& s) const
else if (auto syn = it->second.get_optional<string>("synonym"))
return get_rule_for_func(*syn);
else if (auto syn = it->second.get_optional<string>("deprecated-synonym"))
- throw myexception()<<"Error: the function '"<<s<<"' is now called '"<<*syn<<"'";
+ throw myexception()<<"I don't recognize '"<<s<<"'. Perhaps you meant '"<<*syn<<"'?";
else
return it->second;
}
=====================================
src/models/setup.H
=====================================
--- a/src/models/setup.H
+++ b/src/models/setup.H
@@ -23,6 +23,8 @@ public:
std::vector<pretty_model_t> terms;
std::string show(const Rules& R, bool = true) const;
+ std::string show_main(const Rules&, bool = true) const;
+ std::string show_extracted(const Rules&) const;
pretty_model_t(const ptree& m);
};
@@ -40,6 +42,8 @@ public:
std::string show(const Rules&, bool = true) const;
std::string show_pretty(const Rules&, bool = true) const;
+ std::string show_main(const Rules&, bool = true) const;
+ std::string show_extracted(const Rules&) const;
model_t() = default;
model_t(const ptree&,const ptree&, const std::set<term_t>&, const expression_ref&);
=====================================
src/models/setup.cc
=====================================
--- a/src/models/setup.cc
+++ b/src/models/setup.cc
@@ -118,6 +118,18 @@ string model_t::show_pretty(const Rules& rules, bool top) const
return p.show(rules, not top);
}
+string model_t::show_main(const Rules& rules, bool top) const
+{
+ auto p = pretty_model_t(description);
+ return p.show_main(rules, top);
+}
+
+string model_t::show_extracted(const Rules& rules) const
+{
+ auto p = pretty_model_t(description);
+ return p.show_extracted(rules);
+}
+
model_t::model_t(const ptree& d, const ptree&t, const std::set<term_t>& c, const expression_ref& e)
:description(d), type(t), constraints(c), expression(e)
{
@@ -649,15 +661,11 @@ vector<pair<string, ptree>> extract_terms(ptree& m)
#include "startup/help.hh"
-string pretty_model_t::show(const Rules& R, bool top) const
+string pretty_model_t::show_extracted(const Rules& R) const
{
const int indent = 4;
string output;
- if (top)
- output = unparse(extract_value(main), R);
- else
- output = show_model(extract_value(main), R);
for(int i=0; i<terms.size(); i++)
{
@@ -668,6 +676,19 @@ string pretty_model_t::show(const Rules& R, bool top) const
return output;
}
+string pretty_model_t::show_main(const Rules& R, bool top) const
+{
+ if (top)
+ return unparse(extract_value(main), R);
+ else
+ return show_model(extract_value(main), R);
+}
+
+string pretty_model_t::show(const Rules& R, bool top) const
+{
+ return show_main(R,top) + show_extracted(R);
+}
+
pretty_model_t::pretty_model_t(const ptree& m)
:main(m)
{
=====================================
src/startup/A-T-model.cc
=====================================
--- a/src/startup/A-T-model.cc
+++ b/src/startup/A-T-model.cc
@@ -142,6 +142,13 @@ json optional_to_json(const boost::optional<T>& o)
return *o;
}
+
+// FIXME - maybe we should try to make a single giant model so that we get S1/parameter
+// exactly when this occurs for the logged parameter names, themselves.
+//
+// This would also help it we have some variables that are outside of models, because
+// they are shared between them.
+
json log_summary(ostream& out_cache, ostream& out_screen,ostream& out_both,
const vector<model_t>& IModels, const vector<model_t>& SModels,
const vector<model_t>& ScaleModels,
@@ -151,30 +158,61 @@ json log_summary(ostream& out_cache, ostream& out_screen,ostream& out_both,
json info;
json partitions;
+ json tree;
+ if (P.t().n_branches() > 1)
+ {
+ out_both<<"T:topology ~ uniform on tree topologies\n";
+ tree["topology"] = "uniform";
+ }
+
+ if (P.t().n_branches() > 0)
+ {
+ out_both<<"T:lengths "<<branch_length_model.show(rules)<<endl<<endl;
+ tree["lengths"] = branch_length_model.show(rules, false);
+ }
+
//-------- Log some stuff -----------//
vector<string> filenames = args["align"].as<vector<string> >();
for(int i=0;i<P.n_data_partitions();i++)
{
- string a_name = P[i].get_alphabet().name;
+ json partition;
- out_cache<<"data"<<i+1<<" = "<<filenames[i]<<endl<<endl;
- out_cache<<"alphabet"<<i+1<<" = "<<a_name<<endl<<endl;
- out_cache<<"smodel-index"<<i+1<<" = "<<P.smodel_index_for_partition(i)<<endl;
- out_cache<<"imodel-index"<<i+1<<" = "<<P.imodel_index_for_partition(i)<<endl;
- out_cache<<"scale-index"<<i+1<<" = "<<P.scale_index_for_partition(i)<<endl;
+ // 1. filename
+ out_cache<<"data"<<i+1<<" = "<<filenames[i]<<endl;
+ out_screen<<"#"<<i+1<<": file = "<<filenames[i]<<endl;
+ partition["filename"] = filenames[i];
- json partition;
+ // 2. alphabet
+ string a_name = P[i].get_alphabet().name;
+ out_screen<<"#"<<i+1 <<": alphabet = "<<a_name<<"\n";
+ out_cache<<"alphabet"<<i+1<<" = "<<a_name<<endl;
+ partition["alphabet"] = a_name;
+ // 3. substitution model
+ auto s_index = P.smodel_index_for_partition(i);
+ out_screen<<"#"<<i+1<<": subst "<<indent_and_wrap(0,12,1000,SModels[*s_index].show_main(rules,false))<<" (S"<<*s_index+1<<")\n";
+ out_cache<<"smodel-index"<<i+1<<" = "<<P.smodel_index_for_partition(i)<<endl;
partition["smodel"] = optional_to_json( P.smodel_index_for_partition(i) );
+
+ // 4. indel model
+ if (auto i_index = P.imodel_index_for_partition(i))
+ out_screen<<"#"<<i+1<<": indel "<<indent_and_wrap(0,12,1000,IModels[*i_index].show_main(rules, false))<<" (I"<<*i_index+1<<")\n";
+ else
+ out_screen<<"#"<<i+1<<": indel = none\n";
+ out_cache<<"imodel-index"<<i+1<<" = "<<P.imodel_index_for_partition(i)<<endl;
partition["imodel"] = optional_to_json( P.imodel_index_for_partition(i) );
+
+ // 5. scale model
+ auto scale_index = P.scale_index_for_partition(i);
+ out_screen<<"#"<<i+1<<": scale "<<indent_and_wrap(0,12,1000,ScaleModels[*scale_index].show_main(rules,false))<<" (Scale"<<*scale_index+1<<")\n";
+ out_cache<<"scale-index"<<i+1<<" = "<<P.scale_index_for_partition(i)<<endl;
partition["scale"] = optional_to_json( P.scale_index_for_partition(i) );
- partition["filename"] = filenames[i];
- partition["alphabet"] = a_name;
+ out_screen<<endl;
+ out_cache<<endl;
partitions.push_back(partition);
}
- out_cache<<endl;
json smodels = json::array();
for(int i=0;i<P.n_smodels();i++)
@@ -182,6 +220,9 @@ json log_summary(ostream& out_cache, ostream& out_screen,ostream& out_both,
// out_cache<<"subst model"<<i+1<<" = "<<P.SModel(i).name()<<endl<<endl;
out_cache<<"subst model"<<i+1<<" "<<SModels[i].show(rules)<<endl<<endl;
smodels.push_back(SModels[i].pretty_model());
+ string e = SModels[i].show_extracted(rules);
+ if (e.size())
+ out_screen<<"Substitution model (S"<<i+1<<") -- priors:"<<e<<"\n\n";
}
json imodels = json::array();
@@ -189,6 +230,9 @@ json log_summary(ostream& out_cache, ostream& out_screen,ostream& out_both,
{
out_cache<<"indel model"<<i+1<<" "<<IModels[i].show(rules)<<endl<<endl;
imodels.push_back(IModels[i].pretty_model());
+ string e = IModels[i].show_extracted(rules);
+ if (e.size())
+ out_screen<<"Insertion/deletion model (I"<<i+1<<") -- priors:"<<e<<"\n\n";
}
json scales = json::array();
@@ -196,41 +240,9 @@ json log_summary(ostream& out_cache, ostream& out_screen,ostream& out_both,
{
out_cache<<"scale model"<<i+1<<" "<<ScaleModels[i].show(rules)<<endl<<endl;
scales.push_back(ScaleModels[i].pretty_model());
- }
-
- json tree;
- if (P.t().n_branches() > 1)
- {
- out_both<<"T:topology ~ uniform on tree topologies\n";
- tree["topology"] = "uniform";
- }
-
- if (P.t().n_branches() > 0)
- {
- out_both<<"T:lengths "<<branch_length_model.show(rules)<<endl<<endl;
- tree["lengths"] = branch_length_model.show(rules, false);
- }
-
- for(int i=0;i<P.n_data_partitions();i++)
- {
- auto s_index = P.smodel_index_for_partition(i);
- // out_screen<<"#"<<i+1<<": subst ~ "<<P.SModel(s_index).name()<<" ("<<s_index+1<<") ";
- out_screen<<"#"<<i+1 <<": alphabet = "<<P.get_data_partition(i).get_alphabet().name<<"\n";
-
- out_screen<<"#"<<i+1<<": subst "<<indent_and_wrap(0,12,1000,SModels[*s_index].show_pretty(rules))<<" (S"<<*s_index+1<<")\n";
-
- auto i_index = P.imodel_index_for_partition(i);
- string i_name = "= none";
- if (i_index)
- i_name = indent_and_wrap(0,12,1000,IModels[*i_index].show_pretty(rules));
- out_screen<<"#"<<i+1<<": indel "<<i_name;
- if (i_index and *i_index >= 0)
- out_screen<<" (I"<<*i_index+1<<")";
- out_screen<<endl;
-
- auto scale_index = P.scale_index_for_partition(i);
- out_screen<<"#"<<i+1<<": scale "<<indent_and_wrap(0,12,1000,ScaleModels[*scale_index].show(rules))<<" (Scale"<<*scale_index+1<<")\n";
- out_screen<<endl;
+ string e = ScaleModels[i].show_extracted(rules);
+ if (e.size())
+ out_screen<<"Scale model (Scale"<<i+1<<") -- priors:"<<e<<"\n\n";
}
info["partitions"] = partitions;
View it on GitLab: https://salsa.debian.org/med-team/bali-phy/commit/6d3ce2e54b62bdc0e1b80911d4bdb27a5b6f5260
---
View it on GitLab: https://salsa.debian.org/med-team/bali-phy/commit/6d3ce2e54b62bdc0e1b80911d4bdb27a5b6f5260
You're receiving this email because of your account on salsa.debian.org.
-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://alioth-lists.debian.net/pipermail/debian-med-commit/attachments/20180505/41170855/attachment-0001.html>
More information about the debian-med-commit
mailing list