[med-svn] [Git][med-team/hyphy][master] 6 commits: routine-update: New upstream version

Nilesh Patra gitlab at salsa.debian.org
Mon Jan 18 14:03:09 GMT 2021



Nilesh Patra pushed to branch master at Debian Med / hyphy


Commits:
5ee63253 by Nilesh Patra at 2021-01-18T18:56:12+05:30
routine-update: New upstream version

- - - - -
2c08adc8 by Nilesh Patra at 2021-01-18T18:56:13+05:30
New upstream version 2.5.26+dfsg
- - - - -
eff9a789 by Nilesh Patra at 2021-01-18T18:56:23+05:30
Update upstream source from tag 'upstream/2.5.26+dfsg'

Update to upstream version '2.5.26+dfsg'
with Debian dir 91d3f292066951e1b7b5fb393770d93c81d82437
- - - - -
b44735d5 by Nilesh Patra at 2021-01-18T13:57:17+00:00
Add createmanpages script to manage manpages for future versions

- - - - -
9face271 by Nilesh Patra at 2021-01-18T13:57:28+00:00
Update manpage

- - - - -
eab324d4 by Nilesh Patra at 2021-01-18T13:58:42+00:00
Update changelog

- - - - -


6 changed files:

- debian/changelog
- + debian/createmanpages
- debian/manuals/hyphy.1
- res/TemplateBatchFiles/SelectionAnalyses/RELAX.bf
- res/TemplateBatchFiles/libv3/tasks/estimators.bf
- src/core/likefunc.cpp


Changes:

=====================================
debian/changelog
=====================================
@@ -1,3 +1,12 @@
+hyphy (2.5.26+dfsg-1) unstable; urgency=medium
+
+  * New upstream version 2.5.26+dfsg
+  * Add createmanpages script to
+    manage manpages for future versions
+  * Update hyphy manpage
+
+ -- Nilesh Patra <npatra974 at gmail.com>  Mon, 18 Jan 2021 13:57:49 +0000
+
 hyphy (2.5.25+dfsg-1) unstable; urgency=medium
 
   * New upstream version


=====================================
debian/createmanpages
=====================================
@@ -0,0 +1,31 @@
+#!/bin/sh
+MANDIR=debian/manuals
+mkdir -p $MANDIR
+
+VERSION=`dpkg-parsechangelog | awk '/^Version:/ {print $2}' | sed -e 's/^[0-9]*://' -e 's/-.*//' -e 's/[+~]dfsg$//'`
+NAME=`grep "^Description:" debian/control | sed 's/^Description: *//' | head -n1`
+PROGNAME=`grep "^Package:" debian/control | sed 's/^Package: *//' | head -n1`
+
+AUTHOR=".SH AUTHOR\n \
+This manpage was written by $DEBFULLNAME for the Debian distribution and\n \
+can be used for any other usage of the program.\
+"
+
+# If program name is different from package name or title should be
+# different from package short description change this here
+progname=hyphy
+help2man --no-info --no-discard-stderr --help-option="-h" \
+         --name="$NAME" \
+            --version-string="$VERSION" ${progname} > $MANDIR/${progname}.1
+echo $AUTHOR >> $MANDIR/${progname}.1
+
+echo "$MANDIR/hyphy.1" > debian/hyphy-pt.manpages
+echo "$MANDIR/HYPHYMPI.1" > debian/hyphy-mpi.manpages 
+rm -f errors.log
+
+cat <<EOT
+Please enhance the help2man output.
+The following web page might be helpful in doing so:
+    http://liw.fi/manpages/
+EOT
+


=====================================
debian/manuals/hyphy.1
=====================================
@@ -1,19 +1,12 @@
 .\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.47.16.
-.TH HYPHY "1" "October 2020" "HYPHY 2.5.19" "User Commands"
+.TH HYPHY "1" "January 2021" "hyphy 2.5.26" "User Commands"
 .SH NAME
-HYPHY \- Hypothesis testing using Phylogenies
-.SH SYNOPSIS
-.B hyphy
-[\fI\,-h\/\fR] [\fI\,--help\/\fR][\fI\,-c\/\fR] [\fI\,-d\/\fR] [\fI\,-i\/\fR] [\fI\,-p\/\fR] [\fI\,BASEPATH=directory path\/\fR] [\fI\,CPU=integer\/\fR] [\fI\,LIBPATH=library path\/\fR] [\fI\,USEPATH=library path\/\fR] [\fI\,<standard analysis name> or <path to hyphy batch file>\/\fR] [\fI\,--keyword value \/\fR...] [\fI\,positional arguments \/\fR...]
-.br
-.B HYPHYMPI
-[\fI\,-h\/\fR] [\fI\,--help\/\fR][\fI\,-c\/\fR] [\fI\,-d\/\fR] [\fI\,-i\/\fR] [\fI\,-p\/\fR] [\fI\,BASEPATH=directory path\/\fR] [\fI\,CPU=integer\/\fR] [\fI\,LIBPATH=library path\/\fR] [\fI\,USEPATH=library path\/\fR] [\fI\,<standard analysis name> or <path to hyphy batch file>\/\fR] [\fI\,--keyword value \/\fR...] [\fI\,positional arguments \/\fR...]
+hyphy \- Hypothesis testing using Phylogenies (pthreads version)
 .SH DESCRIPTION
-HyPhy is an open-source software package for the analysis of genetic sequences using techniques in phylogenetics,
-molecular evolution,
-and machine learning.
-.SH OPTIONS
-.SS "Flags:"
+usage: hyphy or HYPHYMPI [\-h] [\-\-help][\-c] [\-d] [\-i] [\-p] [BASEPATH=directory path] [CPU=integer] [LIBPATH=library path] [USEPATH=library path] [<standard analysis name> or <path to hyphy batch file>] [\-\-keyword value ...] [positional arguments ...]
+.PP
+Execute a HyPhy analysis, either interactively, or in batch mode
+optional flags:
 .TP
 \fB\-h\fR \fB\-\-help\fR
 show this help message and exit
@@ -28,41 +21,65 @@ debug mode; causes HyPhy to drop into an expression evaluation mode upon script
 interactive mode; causes HyPhy to always prompt the user for analysis options, even when defaults are available
 .TP
 \fB\-p\fR
-postprocessor mode; drops HyPhy into an interactive mode where general post\-processing scripts can be selected upon analysis completion
+postprocessor mode; drops HyPhy into an interactive mode where general post\-processing scripts can be selected
+upon analysis completion
 .TP
 \fB\-m\fR
 write diagnostic messages to messages.log
-.SS "Optional global arguments:"
+.SS "optional global arguments:"
 .TP
 BASEPATH=directory path
 defines the base directory for all path operations (default is pwd)
 .TP
 CPU=integer
-if compiled with OpenMP multithreading support, requests this many threads; HyPhy could use fewer than this but never more; default is the number of CPU cores (as computed by OpenMP) on the system
+if compiled with OpenMP multithreading support, requests this many threads; HyPhy could use fewer than this
+but never more; default is the number of CPU cores (as computed by OpenMP) on the system
 .TP
 LIBPATH=directory path
-defines the directory where HyPhy library files are located (default installed location is \fI\,/usr/lib/hyphy\/\fP)
+defines the directory where HyPhy library files are located (default installed location is \fI\,/usr/local/lib/hyphy\/\fP
+or as configured during CMake installation
 .TP
 USEPATH=directory path
 specifies the optional working and relative path directory (default is BASEPATH)
 .TP
 ENV=expression
-set HBL environment variables via explicit statements for example ENV='DEBUG_MESSAGES=1;WRITE_LOGS=1'
+set HBL environment variables via explicit statements
+for example ENV='DEBUG_MESSAGES=1;WRITE_LOGS=1'
 .TP
 batch file to run
 if specified, execute this file, otherwise drop into an interactive mode
 .TP
 analysis arguments
 if batch file is present, all remaining positional arguments are interpreted as inputs to analysis prompts
-.SS "Optional keyword arguments:"
-.P
-Keywords can appear anywhere and will be consumed by the requested analysis.
+.PP
+optional keyword arguments (can appear anywhere); will be consumed by the requested analysis
 .TP
 \fB\-\-keyword\fR value
-will be passed to the analysis (which uses KeywordArgument directives) multiple values for the same keywords are treated as an array of values for multiple selectors
-.SS "Available standard keyword analyses:"
-.P
-Standard keyword analyses are located in \fI\,/usr/share/hyphy\/\fP.
+will be passed to the analysis (which uses KeywordArgument directives)
+multiple values for the same keywords are treated as an array of values for multiple selectors
+.PP
+usage examples:
+.SS "Select a standard analysis from the list :"
+.IP
+hyphy \fB\-i\fR
+.PP
+Run a standard analysis with default options and one required user argument;
+.IP
+hyphy busted \fB\-\-alignment\fR path/to/file
+.PP
+Run a standard analysis with additional keyword arguments
+.IP
+hyphy busted \fB\-\-alignment\fR path/to/file \fB\-\-srv\fR No
+.PP
+See which arguments are understood by a standard analysis
+.IP
+hyphy busted \fB\-\-help\fR
+.PP
+Run a custom analysis and pass it some arguments
+.IP
+hyphy path/to/hyphy.script argument1 'argument 2'
+.PP
+Available standard keyword analyses (located in /home/nilesh/packages/hyphy/hyphy/res/)
 .TP
 meme
 [MEME] Test for episodic site\-level selection using MEME (Mixed Effects Model of Evolution).
@@ -285,24 +302,6 @@ Test for positive selection using the approach of Nielsen and Yang, by sampling
 .TP
 gard
 [GARD] Screen an alignment using GARD (requires an MPI environment).
-.SH EXAMPLES
-.TP
-hyphy \-i
-Select a standard analysis from the list
-.TP
-hyphy busted \-\-alignment path/to/file
-Run a standard analysis with default options and one required user argument;
-.TP
-hyphy busted \-\-alignment path/to/file \-\-srv No
-Run a standard analysis with additional keyword arguments
-.TP
-hyphy busted \-\-help
-See which arguments are understood by a standard analysis
-.TP
-hyphy path/to/hyphy.script argument1 'argument 2'
-Run a custom analysis and pass it some arguments
 .SH AUTHOR
-.PP
-HyPhy is written by Sergei L Kosakovsky Pond, Simon D. W. Frost, Spencer V. Muse, and others.
-.PP
-The present manual page is written by Étienne Mollier <etienne.mollier at mailoo.org> for the Debian project.
+ This manpage was written by Nilesh Patra for the Debian distribution and
+ can be used for any other usage of the program.


=====================================
res/TemplateBatchFiles/SelectionAnalyses/RELAX.bf
=====================================
@@ -367,11 +367,20 @@ if (relax.model_set == "All") { // run all the models
         relax.weight_multipliers    = parameters.helper.stick_breaking (utility.SwapKeysAndValues(utility.MatrixToDict(relax.distribution["weights"])),None);
         relax.constrain_parameters   = parameters.ConstrainMeanOfSet(relax.distribution["rates"],relax.weight_multipliers,1,"relax");
         
+        
+        relax.i = 0;
         for (key, value; in; relax.constrain_parameters[terms.global]){
             model.generic.AddGlobal (relax.ge.bsrel_model, value, key);
-            parameters.SetRange (value, terms.range_almost_01);
+            relax.i += 1;
+            if (relax.i < relax.rate_classes) {
+                parameters.SetRange (value, terms.range_almost_01);
+            } else {
+                parameters.SetRange (value, terms.range_gte1);
+            }
+            
         }
         
+        
         relax.distribution["rates"] = Transpose (utility.Values (relax.constrain_parameters[terms.global]));
         
         for (relax.i = 1; relax.i < relax.rate_classes; relax.i += 1) {
@@ -397,7 +406,8 @@ if (relax.model_set == "All") { // run all the models
             math.Mean ( 
                 utility.Map (selection.io.extract_global_MLE_re (relax.final_partitioned_mg_results, "^" + terms.parameters.omega_ratio + ".+"), "_v_", "_v_[terms.fit.MLE]"));
                 
-            relax.init_grid_setup        (relax.distribution);
+                
+            relax.init_grid_setup       (relax.distribution);
             relax.initial_grid         = estimators.LHC (relax.initial_ranges,relax.initial_grid.N);
             relax.initial_grid = utility.Map (relax.initial_grid, "_v_", 
                 'relax._renormalize (_v_, "relax.distribution", relax.initial.test_mean)'
@@ -406,8 +416,11 @@ if (relax.model_set == "All") { // run all the models
             
             
             parameters.DeclareGlobalWithRanges ("relax.bl.scaler", 1, 0, 1000);
+            for (i, v; in; relax.initial_grid) {
+                v["relax.bl.scaler"] = {terms.id : "relax.bl.scaler", terms.fit.MLE : Random (2,4)};
+            }
             
-                         
+                          
             relax.grid_search.results =  estimators.FitLF (relax.filter_names, relax.trees,{ "0" : {"DEFAULT" : "relax.ge"}},
                                         relax.final_partitioned_mg_results,
                                         relax.model_object_map, 
@@ -429,7 +442,8 @@ if (relax.model_set == "All") { // run all the models
                                         }
             );
             
-
+ 
+ 
             relax.general_descriptive.fit =  estimators.FitLF (relax.filter_names,
                                         relax.trees,
                                         { "0" : {"DEFAULT" : "relax.ge"}},
@@ -1462,6 +1476,31 @@ function relax.init_grid_setup (omega_distro) {
 
 //------------------------------------------------------------------------------
 
+function relax.init_grid_setup_scaled (omega_distro) {
+    utility.ForEachPair (omega_distro[terms.parameters.rates], "_index_", "_name_", 
+        '
+              relax.initial_ranges [_name_] = {
+                terms.lower_bound : 0,
+                terms.upper_bound : 1
+             };
+            
+        '
+    );
+
+
+    utility.ForEachPair (omega_distro[terms.parameters.weights], "_index_", "_name_", 
+        '
+             relax.initial_ranges [_name_] = {
+                terms.lower_bound : 0,
+                terms.upper_bound : 1
+            };
+        '
+    );
+
+}
+
+//------------------------------------------------------------------------------
+
 lfunction relax._renormalize (v, distro, mean) {
 
     parameters.SetValues (v);


=====================================
res/TemplateBatchFiles/libv3/tasks/estimators.bf
=====================================
@@ -185,7 +185,7 @@ function estimators.SetGlobals2(key2, value) {
     if (Type(__init_value) != "AssociativeList") {
         __init_value = (initial_values[terms.global])[key2];
     }
-        
+
     
     if (Type(__init_value) == "AssociativeList") {
         if (__init_value[terms.fix]) {


=====================================
src/core/likefunc.cpp
=====================================
@@ -2675,7 +2675,7 @@ void    _LikelihoodFunction::CheckDependentBounds (void) {
         lowerBounds.theData[index]      =   cornholio->GetLowerBound();
         upperBounds.theData[index]      =   cornholio->GetUpperBound();
         
-        //fprintf (stderr, "_LikelihoodFunction::CheckDependentBounds variable %s (%d), current value %g, range %g to %g\n", cornholio->theName->sData, index, currentValues.theData[index], lowerBounds.theData[index], upperBounds.theData[index]);
+        //fprintf (stderr, "_LikelihoodFunction::CheckDependentBounds variable %s (%d), current value %g, range %g to %g\n", cornholio->theName->get_str(), index, currentValues.theData[index], lowerBounds.theData[index], upperBounds.theData[index]);
         
         bool badApple = currentValues.theData[index]<lowerBounds.theData[index] || currentValues.theData[index]>upperBounds.theData[index];
         if (badApple) {
@@ -2734,7 +2734,7 @@ void    _LikelihoodFunction::CheckDependentBounds (void) {
             SetIthIndependent (index,temp);
         }
         
-        //fprintf (stderr, "\n%s\n", _String((_String*)dependancies.toStr()).sData);
+        //fprintf (stderr, "\n%s\n", _String((_String*)dependancies.toStr()).get_str());
         
         // now we can go through the dependant variables which are out of bounds one at a time
         // and attempt to move them back in.
@@ -2863,16 +2863,16 @@ void    _LikelihoodFunction::CheckDependentBounds (void) {
         
         tagged.ReorderList();
         
-        // fprintf (stderr, "Tagged the following variables %s\n", _String((_String*)_aux.toStr()).sData);
+        //fprintf (stderr, "Tagged the following variables %s\n", _String((_String*)_aux.toStr()).get_str());
         
         
         for (index = 0; index<indexInd.lLength; index++) {
             dependancies.Store (0,index,GetIthIndependentBound (index,true));
-            dependancies.Store (1,index,(GetIthIndependentBound (index,false)>10?10:GetIthIndependentBound (index,true))-dependancies(0,index));
+            dependancies.Store (1,index,(GetIthIndependentBound (index,false)>10?10:GetIthIndependentBound (index,false))-dependancies(0,index));
             dependancies.Store (2,index,GetIthIndependent (index));
         }
         
-        // fprintf (stderr, "\n%s\n", _String((_String*)dependancies.toStr()).sData);
+        //fprintf (stderr, "\n%s\n", _String((_String*)dependancies.toStr()).get_str());
         
         
         for (i = 0L; i < 10000L; i++) {
@@ -2880,14 +2880,14 @@ void    _LikelihoodFunction::CheckDependentBounds (void) {
             for (long v = 0L; v < _aux.lLength; v++) {
                 index = _aux.get(v);
                 SetIthIndependent   (index,dependancies(0,index)+genrand_real2()*dependancies(1,index));
-                //fprintf (stderr, "[%d] %s => %g\n", index, GetIthIndependentName(index)->sData, GetIthIndependent(index));
+                //fprintf (stderr, "[%d] %s => %g (%g - %g)\n", index, GetIthIndependentName(index)->get_str(), GetIthIndependent(index), dependancies(0,index), dependancies(1,index));
             }
             for (j = 0; j < nonConstantDep->lLength; j++) {
                 // check whether any of the dependent variables are out of bounds
                 long j_corrected = nonConstantIndices.get(j);
                 currentValues.theData[j_corrected]    =   LocateVar(nonConstantDep->list_data[j])->Compute()->Value();
-                //fprintf (stderr, "[%d] %g (%g, %g)\n", j, j_corrected, currentValues.theData[j_corrected], lowerBounds.theData[j_corrected], upperBounds[j_corrected]);
                 if (currentValues.theData[j_corrected]<lowerBounds.theData[j_corrected] || currentValues.theData[j_corrected]>upperBounds.theData[j_corrected]) {
+                    //fprintf (stderr, "[%d] %s => %g (%g, %g)\n", j, LocateVar(nonConstantDep->list_data[j])->GetName()->get_str(), j_corrected, currentValues.theData[j_corrected], lowerBounds.theData[j_corrected], upperBounds[j_corrected]);
                     //fprintf (stderr, "===| CHECK FAILED\n");
                     badConstraint = nonConstantDep->list_data[j];
                     break;



View it on GitLab: https://salsa.debian.org/med-team/hyphy/-/compare/e997fbffb757a78e6c0010833ba0cc0d152aff93...eab324d4766f5766c88086b41091363bd1617f75

-- 
View it on GitLab: https://salsa.debian.org/med-team/hyphy/-/compare/e997fbffb757a78e6c0010833ba0cc0d152aff93...eab324d4766f5766c88086b41091363bd1617f75
You're receiving this email because of your account on salsa.debian.org.


-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://alioth-lists.debian.net/pipermail/debian-med-commit/attachments/20210118/0a7bc0f8/attachment-0001.html>


More information about the debian-med-commit mailing list