[med-svn] [Git][med-team/hyphy][upstream] New upstream version 2.5.29+dfsg

Michael R. Crusoe gitlab at salsa.debian.org
Fri Feb 26 06:40:28 GMT 2021



Michael R. Crusoe pushed to branch upstream at Debian Med / hyphy


Commits:
6884fa66 by Michael R. Crusoe at 2021-02-25T17:40:09+01:00
New upstream version 2.5.29+dfsg
- - - - -


28 changed files:

- res/TemplateBatchFiles/GARD.bf
- res/TemplateBatchFiles/PairwiseRelativeRatio.bf
- res/TemplateBatchFiles/SelectionAnalyses/RELAX.bf
- res/TemplateBatchFiles/SelectionAnalyses/modules/shared-load-file.bf
- res/TemplateBatchFiles/TemplateModels/GY94.ibf
- res/TemplateBatchFiles/TemplateModels/GY94.mdl
- res/TemplateBatchFiles/files.lst
- res/TemplateBatchFiles/libv3/UtilityFunctions.bf
- res/TemplateBatchFiles/libv3/convenience/regexp.bf
- res/TemplateBatchFiles/libv3/tasks/alignments.bf
- res/TemplateBatchFiles/libv3/tasks/trees.bf
- res/TemplateBatchFiles/readIndexFile.bf
- src/core/associative_list.cpp
- src/core/batchlan.cpp
- src/core/batchlan2.cpp
- src/core/batchlanruntime.cpp
- src/core/category.cpp
- src/core/constant.cpp
- src/core/formula.cpp
- src/core/global_object_lists.cpp
- src/core/global_things.cpp
- src/core/include/batchlan.h
- src/core/include/formula.h
- src/core/include/global_object_lists.h
- src/core/include/variable.h
- src/core/likefunc.cpp
- src/core/operation.cpp
- src/core/parser.cpp


Changes:

=====================================
res/TemplateBatchFiles/GARD.bf
=====================================
@@ -42,7 +42,7 @@
 
 /* 1a. Initial Setup
 ------------------------------------------------------------------------------*/
-RequireVersion ("2.4.0");
+RequireVersion ("2.5.29");
 
 LoadFunctionLibrary ("libv3/all-terms.bf");
 LoadFunctionLibrary ("libv3/convenience/regexp.bf");
@@ -75,12 +75,15 @@ namespace terms.gard {
     nucleotide = "nucleotide";
     protein    = "amino-acid";
     codon      = "codon";
+    masterList = "masterList";
 };
 
 gard.json = {   terms.json.analysis: gard.analysisDescription,
                 terms.json.input: {},
             };
 
+utility.SetEnvVariable ("OPTIMIZE_SUMMATION_ORDER_PARTITION", 100); 
+// don't spend too much time optimizing column ordering.
 
 /* 1b. User Input
 ------------------------------------------------------------------------------*/
@@ -164,7 +167,50 @@ function gard.model.withGDD  (options) {
 // Where to save the json
 gard.defaultJsonFilePath = (gard.alignment[terms.data.file] + '.GARD.json');
 KeywordArgument ("output", "Write the resulting JSON to this file (default is to save to the same path as the alignment file + 'GARD.json')", gard.defaultJsonFilePath);
-gard.jsonFileLocation = io.PromptUserForFilePath ("Save the resulting JSON file to");
+gard.jsonFileLocation = io.ReadFromOrCreate ("Save the resulting JSON file to", gard.json);
+
+gard.json = gard.jsonFileLocation [^"terms.data.value"];
+gard.jsonFileLocation = gard.jsonFileLocation [^"terms.data.file"];
+
+KeywordArgument ("mode", "Run mode (Normal or Faster)", "Normal");
+
+gard.fastRunMode = io.SelectAnOption  ({"Normal" : "Default optimization and convergence settings",
+                                        "Faster" :  "Reduce individual optimization precision and relax convergence settings"},
+                                        "Run type") == "Faster";
+
+
+// Setup master list of evaluated models.
+gard.masterList = {};
+/** gard.masterList:
+    "model string": "model fitness (cAIC score)"
+    model string is in the format: "{\n{bp1} \n{bp2} ...\n}"
+    model fitness is either infinity (if not evaluated) or the numeric cAIC score
+**/
+
+
+gard.startWithBP = 0;
+
+if (utility.Has (gard.json, terms.gard.masterList, "AssociativeList")) {
+    console.log ("
+
+### Found partial run results in `gard.jsonFileLocation`. 
+Will resume search from the end of the previous run.
+
+    ");
+    
+    gard.startWithBP = utility.Array1D(gard.json["breakpointData"]) - 1;
+    gard.masterList = gard.json[terms.gard.masterList];
+}  else {
+    if (utility.Has (gard.json, "breakpointData", "AssociativeList")) {
+        console.log ("
+
+    ### Found previous run results in `gard.jsonFileLocation`. 
+    Run terminating without overwriting previous results.
+
+        ");
+        return 0;
+    }
+}
 
 gard.defaultFitFilePath = (gard.alignment[terms.data.file] + '.best-gard');
 KeywordArgument ("output-lf", "Write the best fitting HyPhy analysis snapshot to (default is to save to the same path as the alignment file + 'best-gard')", gard.defaultFitFilePath);
@@ -197,15 +243,6 @@ for (_pattern_; in; alignments.Extract_site_patterns ("gard.filter")) {
     }
 }
 
-/*
-utility.ForEach (alignments.Extract_site_patterns ("gard.filter"), "_pattern_", "
-    if (_pattern_[terms.data.is_constant]==FALSE) {
-        utility.ForEachPair (_pattern_[terms.data.sites], '_key_', '_value_',
-        '
-            gard.variableSiteMap + (gard.siteMultiplier*_value_ + gard.siteShift);
-        ');
-    }
-");*/
 
 gard.variableSiteMap = Transpose (utility.DictToArray (gard.variableSiteMap)) % 0; // sort by 1st column
 gard.variableSites = Rows (gard.variableSiteMap);
@@ -259,21 +296,22 @@ io.ReportProgressMessageMD("GARD", "baseline-fit", "* " + selection.io.report_fi
 ------------------------------------------------------------------------------*/
 // Setup mpi variableSiteMap
 gard.createLikelihoodFunctionForExport ("gard.exportedModel", gard.model);
+
+if (fastRunMode) {
+    utility.SetEnvVariable ("OPTIMIZATION_PRECISION", 0.1); 
+} else {
+    utility.SetEnvVariable ("OPTIMIZATION_PRECISION", 0.001);     
+}
+
+
 gard.queue = mpi.CreateQueue (
                             {
                             "LikelihoodFunctions" : {{"gard.exportedModel"}},
                             "Headers" : {{"libv3/all-terms.bf"}},
-                            "Variables" : {{"gard.globalParameterCount", "gard.numSites", "gard.alignment", "gard.variableSiteMap", "gard.dataType", "terms.gard.codon"}}
+                            "Variables" : {{"gard.globalParameterCount", "gard.numSites", "gard.alignment", "gard.variableSiteMap", "gard.dataType", "terms.gard.codon","OPTIMIZE_SUMMATION_ORDER_PARTITION", "OPTIMIZATION_PRECISION"}}
                             }
                         );
 
-// Setup master list of evaluated models.
-gard.masterList = {};
-/** gard.masterList:
-    "model string": "model fitness (cAIC score)"
-    model string is in the format: "{\n{bp1} \n{bp2} ...\n}"
-    model fitness is either infinity (if not evaluated) or the numeric cAIC score
-**/
 
 gard.masterList [{{}}] = gard.baseline_cAIC;
 gard.bestOverall_cAIC_soFar = gard.baseline_cAIC;
@@ -298,56 +336,63 @@ gard.json['baselineScore'] = gard.baseline_cAIC;
 ------------------------------------------------------------------------------*/
 io.ReportProgressMessageMD('GARD', 'single-breakpoint', 'Performing an exhaustive single breakpoint analysis');
 
-namespace gard {
 
-    // 2a1. Loop over every valid single break point
-    singleBreakPointBest_cAIC = ^"math.Infinity";
+if (gard.startWithBP > 0) {
+    io.ReportProgressMessageMD('GARD', 'single-breakpoint', 'Single breakpoint analysis already completed.');
+    gard.improvements = gard.json ["improvements"];
+} else {
+    namespace gard {
 
-    for (breakPointIndex = 0; breakPointIndex <variableSites - 1; breakPointIndex += 1) {
-        siteIndex = variableSiteMap [breakPointIndex];
+        // 2a1. Loop over every valid single break point
+        singleBreakPointBest_cAIC = ^"math.Infinity";
 
-        if (singleBreakPointBest_cAIC < baseline_cAIC) {
-            io.ReportProgressBar ("GARD", "Breakpoint " +  Format (1+breakPointIndex, 10, 0) + " of " + (variableSites-1) + ". Best cAIC = " + Format (singleBreakPointBest_cAIC, 12, 4) + " [delta = " + Format (baseline_cAIC - singleBreakPointBest_cAIC, 12, 4) + "] with breakpoint at site " + Format (singleBreakPointBestLocation, 10, 0));
-        } else {
-            io.ReportProgressBar ("GARD", "Breakpoint " +  Format (1+breakPointIndex, 10, 0) + " of " + (variableSites-1) + ". Best cAIC = " + Format (baseline_cAIC, 12, 4) + " with no breakpoints." );
-        }
+        for (breakPointIndex = 0; breakPointIndex <variableSites - 1; breakPointIndex += 1) {
+            siteIndex = variableSiteMap [breakPointIndex];
+
+            if (singleBreakPointBest_cAIC < baseline_cAIC) {
+                io.ReportProgressBar ("GARD", "Breakpoint " +  Format (1+breakPointIndex, 10, 0) + " of " + (variableSites-1) + ". Best cAIC = " + Format (singleBreakPointBest_cAIC, 12, 4) + " [delta = " + Format (baseline_cAIC - singleBreakPointBest_cAIC, 12, 4) + "] with breakpoint at site " + Format (singleBreakPointBestLocation, 10, 0));
+            } else {
+                io.ReportProgressBar ("GARD", "Breakpoint " +  Format (1+breakPointIndex, 10, 0) + " of " + (variableSites-1) + ". Best cAIC = " + Format (baseline_cAIC, 12, 4) + " with no breakpoints." );
+            }
 
 
-        if (gard.validatePartititon ({{siteIndex}}, minPartitionSize, numSites) == FALSE)  {
-            continue;
-        }
+            if (gard.validatePartititon ({{siteIndex}}, minPartitionSize, numSites) == FALSE)  {
+                continue;
+            }
 
-        mpi.QueueJob (queue, "gard.obtainModel_cAIC", {"0" : {{siteIndex__}},
-                                                     "1" : model,
-                                                     "2" : baseLikelihoodInfo},
-                                                     "gard.storeSingleBreakPointModelResults");
+            mpi.QueueJob (queue, "gard.obtainModel_cAIC", {"0" : {{siteIndex__}},
+                                                         "1" : model,
+                                                         "2" : baseLikelihoodInfo},
+                                                         "gard.storeSingleBreakPointModelResults");
                                                      
 
-    }
+        }
 
-    mpi.QueueComplete (queue);
-    io.ClearProgressBar();
+        mpi.QueueComplete (queue);
+        io.ClearProgressBar();
 
-    // 2a2. Report the status of the sinlge break point analysis
-    io.ReportProgressMessageMD('GARD', 'single-breakpoint', 'Done with single breakpoint analysis.');
-    io.ReportProgressMessageMD('GARD', 'single-breakpoint', ("   Best sinlge break point location: " + singleBreakPointBestLocation));
-    io.ReportProgressMessageMD('GARD', 'single-breakpoint', ("   c-AIC  = " + singleBreakPointBest_cAIC));
-}
+        // 2a2. Report the status of the sinlge break point analysis
+        io.ReportProgressMessageMD('GARD', 'single-breakpoint', 'Done with single breakpoint analysis.');
+        io.ReportProgressMessageMD('GARD', 'single-breakpoint', ("   Best sinlge break point location: " + singleBreakPointBestLocation));
+        io.ReportProgressMessageMD('GARD', 'single-breakpoint', ("   c-AIC  = " + singleBreakPointBest_cAIC));
+    }
 
-// 2a3. Evaluate if the best single breakpoint is the overall best model
-if (gard.singleBreakPointBest_cAIC < gard.bestOverall_cAIC_soFar) {
-    gard.bestOverall_cAIC_soFar = gard.singleBreakPointBest_cAIC;
-    gard.bestOverallModelSoFar = {{gard.singleBreakPointBestLocation}};
-    gard.improvements = {'0': {
-                                "deltaAICc": gard.baseline_cAIC - gard.bestOverall_cAIC_soFar,
-                                "breakpoints": gard.bestOverallModelSoFar
-                              }
-                        };
-} else {
-    gard.bestOverallModelSoFar = null;
+    // 2a3. Evaluate if the best single breakpoint is the overall best model
+    if (gard.singleBreakPointBest_cAIC < gard.bestOverall_cAIC_soFar) {
+        gard.bestOverall_cAIC_soFar = gard.singleBreakPointBest_cAIC;
+        gard.bestOverallModelSoFar = {{gard.singleBreakPointBestLocation}};
+        gard.improvements = {'0': {
+                                    "deltaAICc": gard.baseline_cAIC - gard.bestOverall_cAIC_soFar,
+                                    "breakpoints": gard.bestOverallModelSoFar
+                                  }
+                            };
+    } else {
+        gard.bestOverallModelSoFar = null;
+    }
+
+    gard.concludeAnalysis(gard.bestOverallModelSoFar, TRUE);
 }
 
-gard.concludeAnalysis(gard.bestOverallModelSoFar);
 
 
 /* 2b. Evaluation of multiple break points with genetic algorithm
@@ -355,6 +400,7 @@ gard.concludeAnalysis(gard.bestOverallModelSoFar);
 io.ReportProgressMessageMD('GARD', 'multi-breakpoint', 'Performing multi breakpoint analysis using a genetic algorithm');
 
 namespace gard {
+
     // GA.1: Setup global parameters
     populationSize = 32; // the GARD paper used: (numberOfMpiNodes*2 - 2) with 17 mpi nodes
     if(populationSize < mpi.NodeCount() -1 ) {
@@ -364,15 +410,35 @@ namespace gard {
     rateOfMutationsTharAreSmallShifts = 0.8; // some mutations are a new random break point; some are small shifts of the break point to an adjacent location.
     maxFailedAttemptsToMakeNewModel = 7;
     cAIC_diversityThreshold   = 0.01;
-    cAIC_improvementThreshold = 0.01; // I think this was basically 0 in the gard paper
-    maxGenerationsAllowedWithNoNewModelsAdded = 2; // TODO: Not in the GARD paper. use 10?
-    maxGenerationsAllowedAtStagnant_cAIC = 100; // TODO: this is set to 100 in the GARD paper
+ 
+    if (fastRunMode) {
+        maxGenerationsAllowedAtStagnant_cAIC = Min (populationSize, 40);
+        cAIC_improvementThreshold = 2;
+    } else {
+        maxGenerationsAllowedAtStagnant_cAIC = 100;
+        cAIC_improvementThreshold = 0.01;
+    }
 
+    maxGenerationsAllowedWithNoNewModelsAdded = maxGenerationsAllowedAtStagnant_cAIC $ 4; // TODO: Not in the GARD paper. use 10?
+    
     // GA.2: Loop over increasing number of break points
     addingBreakPointsImproves_cAIC = TRUE;
-    numberOfBreakPointsBeingEvaluated = 1;
+    if (startWithBP > 0) {
+        numberOfBreakPointsBeingEvaluated = startWithBP;
+
+        bestOverallModelSoFar = {1, startWithBP};
+        for (i, v; in; json["breakpointData"]) {
+            if (i < startWithBP) {
+                bestOverallModelSoFar[+i] = +(v['bps'])[1];
+            }
+        }
+    } else {
+        numberOfBreakPointsBeingEvaluated = 1;
+    }
+    
     while(addingBreakPointsImproves_cAIC) {
-        // GA.2.a Setup for n number of break points
+        //#profile START;
+       // GA.2.a Setup for n number of break points
         numberOfBreakPointsBeingEvaluated+=1;
         generationsAtCurrentBest_cAIC = 0;
         generationsNoNewModelsAdded = 0;
@@ -477,7 +543,9 @@ namespace gard {
         } else {
             addingBreakPointsImproves_cAIC = FALSE;
         }
-        gard.concludeAnalysis(bestOverallModelSoFar);
+        gard.concludeAnalysis(bestOverallModelSoFar, addingBreakPointsImproves_cAIC);
+        //#profile _hyphy_profile_dump;
+        //utility.FinishAndPrintProfile (_hyphy_profile_dump);
     }
 
 }
@@ -508,7 +576,7 @@ namespace gard {
  */
 
 lfunction gard.fitPartitionedModel (breakPoints, model, initialValues, saveToFile, constrainToOneTopology) {
-
+    //#profile START;
 
     currentIndex = 0;
     currentStart = 0;
@@ -583,6 +651,9 @@ lfunction gard.fitPartitionedModel (breakPoints, model, initialValues, saveToFil
     DeleteObject (likelihoodFunction, :shallow);
 
     res[^"terms.parameters"] += df + (model[^"terms.parameters"])[^"terms.model.empirical"];
+    
+    //#profile _fit_dump;
+    //utility.FinishAndPrintProfile (_fit_dump); 
     return res;
 
 }
@@ -653,12 +724,19 @@ lfunction gard.modelIsNotInMasterList(masterList, breakPoints) {
     //return utility.KeyExists(masterList, '' + breakPoints) == FALSE;
 }
 
-function gard.concludeAnalysis(bestOverallModel) {
+function gard.concludeAnalysis(bestOverallModel, writeMaster) {
+    
+    
+    if (writeMaster) {
+        (gard.json)[terms.gard.masterList] = gard.masterList;
+    } else {
+        (gard.json) - terms.gard.masterList;
+    }
     (gard.json)['timeElapsed'] = Time(1) - gard.startTime;
     (gard.json)['siteBreakPointSupport'] = gard.getSiteBreakPointSupport(gard.masterList, gard.bestOverall_cAIC_soFar);
     (gard.json)['singleTreeAICc'] = gard.getSingleTree_cAIC(bestOverallModel);
     (gard.json)['totalModelCount'] = Abs(gard.masterList);
-
+    (gard.json)['bestModelAICc'] = gard.bestOverall_cAIC_soFar;
     gard.setBestModelTreeInfoToJson(bestOverallModel);
 
     if(Abs((gard.json)['trees']) > 1) {
@@ -721,38 +799,27 @@ lfunction gard.getSingleTree_cAIC(bestOverallModel) {
 }
 
 lfunction gard.getSiteBreakPointSupport(modelMasterList, best_cAIC_score) {
-    gard.masterListModels = utility.Keys(modelMasterList);
-    gard.masterList_cAIC_values = utility.Values(modelMasterList);
-    gard.numberOfModels = Columns(gard.masterList_cAIC_values);
-
-    gard.siteAkaikeWeights = {};
-    for(modelIndex=0; modelIndex<gard.numberOfModels; modelIndex=modelIndex+1) {
-        gard.cAIC_delta = best_cAIC_score - gard.masterList_cAIC_values[modelIndex];
-        gard.akaikeWeight = Exp(gard.cAIC_delta * 0.5);
 
-        if( Abs(gard.masterListModels[modelIndex]) > 3) {
-            gard.breakPointMatrix = gard.Helper.convertMatrixStringToMatrix(gard.masterListModels[modelIndex]);
-        } else {
-            gard.breakPointMatrix = 0;
-        }
-        gard.numberOfBreakPoints = Columns(gard.breakPointMatrix);
-
-        for(breakPointIndex=0; breakPointIndex<gard.numberOfBreakPoints; breakPointIndex=breakPointIndex+1) {
-            gard.siteAkaikeWeights[gard.breakPointMatrix[breakPointIndex]] += gard.akaikeWeight;
+    bp_support = {};
+    total_weight = 0;
+    for (model, score; in; modelMasterList) {
+        aicw = Exp ((best_cAIC_score-score)*0.5);
+        if (aicw > 0) {
+            total_weight += aicw;
+            bps = Eval (model);
+            for (bp; in; bps) {
+                bp_support[bp] += aicw;
+            }
         }
     }
-
-    gard.akaikeWeightScallingFactor = 1 / (Max(gard.siteAkaikeWeights)['value']);
-    gard.normalizedSiteAkaikeWeights = {};
-
-    gard.potentialBreakPointList = utility.Keys(gard.siteAkaikeWeights);
-    gard.numberOfPotentialBreakPoints = Abs(gard.siteAkaikeWeights);
-    for(breakPointIndex=0; breakPointIndex<gard.numberOfPotentialBreakPoints; breakPointIndex+=1) {
-        siteIndex = gard.potentialBreakPointList[breakPointIndex];
-        gard.normalizedSiteAkaikeWeights[siteIndex] = gard.siteAkaikeWeights[siteIndex]*gard.akaikeWeightScallingFactor;
+    
+    normalized_support = {};
+    for (bp, support; in; bp_support) {
+        normalized_support[bp] = support / total_weight;
     }
-
-    return gard.normalizedSiteAkaikeWeights;
+    DeleteObject (bp_support);
+    return normalized_support;
+    
 }
 
 
@@ -779,7 +846,7 @@ lfunction gard.GA.initializeModels (numberOfBreakPoints, populationSize, numberO
             }
             breakPoints [breakpoint.index] = (^"gard.variableSiteMap") [Random(0, numberOfPotentialBreakPoints)$1];
             breakPoints = gard.Helper.sortedMatrix(breakPoints);
-
+ 
         } while (gard.validatePartititon (breakPoints, ^"gard.minPartitionSize", ^"gard.numSites") == FALSE);
 
         initializedModels[breakPoints] = ^"math.Infinity";


=====================================
res/TemplateBatchFiles/PairwiseRelativeRatio.bf
=====================================
@@ -38,8 +38,7 @@ referenceSpecCount = ds.species;
 
 treeRead = 0;
 
-if (IS_TREE_PRESENT_IN_DATA)
-{
+if (IS_TREE_PRESENT_IN_DATA) {
 	treeString = DATAFILE_TREE;
 	treeRead = 1;
 }
@@ -65,36 +64,29 @@ for (counter = 1; counter<fileCount; counter = counter+1) {
 
 fprintf (stdout,"\n\n** All data files were read successfully **\n\n");
 
-if (dataType)
-{
-	if (codeTables)
-	{
-		dummy = ApplyGeneticCodeTable (codeTableMatrix[0]);
+if (dataType) {
+	if (codeTables) {
+		ApplyGeneticCodeTable (codeTableMatrix[0]);
 		ModelMatrixDimension = 0;
 	}
 	DataSetFilter filteredData = CreateFilter (ds,3,"","",GeneticCodeExclusions);
-}
-else
-{
+} else {
 	DataSetFilter filteredData = CreateFilter (ds,1);
 }
 
 
-if (treeRead)
-{
+if (treeRead) {
 	fprintf (stdout, "\n\nA tree was found in the data file:\n",treeString,"\n\nWould you like to use it:(Y/N)?");
 	fscanf (stdin, "String", response);
-	if ((response=="n")||(response=="N"))
-	{
+	if ((response=="n")||(response=="N")) {
 		treeRead = 0;
 	}
 	fprintf (stdout, "\n\n");
 }
 
-if (!treeRead)
-{
+if (!treeRead) {
 	SetDialogPrompt ("Please select a tree file for the data:");
-	fscanf (PROMPT_FOR_FILE, "String", treeString);
+	fscanf (PROMPT_FOR_FILE, REWIND, "String", treeString);
 }
 
 SelectTemplateModel(filteredData);
@@ -104,30 +96,20 @@ global  RelRatio;
 relationString = ":=RelRatio*";
 
 #include "selectModelParameters.bf";
-
 SetDialogPrompt ("Save full results to:");
-
 modelParameterCount = Rows("LAST_MODEL_PARAMETER_LIST");
-
 fprintf (PROMPT_FOR_FILE,CLEAR_FILE);
-
 tabulatedFileName = LAST_FILE_PATH;
-
-
 singleFileResults = {fileCount,1};
-
 fprintf (stdout,"\n\n***** RUNNING SINGLE FILE ANALYSES *****\n\n");
-
 fullParameterCount = 0;
 
-/*MESSAGE_LOGGING = 0;*/
-
-OPTIMIZATION_PRECISION = OPTIMIZATION_PRECISION/10;
-
 timer = Time(0);
 
 for (counter = 1; counter<= fileCount; counter += 1) {
 	HarvestFrequencies (vectorOfFrequencies,filteredData,1,1,1);
+	
+	
 	if (FREQUENCY_SENSITIVE) {
 		modelMatrix = 0;
 		if (USE_POSITION_SPECIFIC_FREQS) {
@@ -144,34 +126,9 @@ for (counter = 1; counter<= fileCount; counter += 1) {
 	}
 	
 	Tree firstFileTree = treeString;
-
 	LikelihoodFunction lf = (filteredData,firstFileTree);
 	Optimize (res,lf);
-	DeleteObject (lf);
-
 
-	if (counter<fileCount) {
-		DeleteObject (lf, :shallow);
-
-		DataSet ds = ReadDataFile (stringMatrix[counter]);
-		if (dataType)
-		{
-			if (codeTables)
-			{
-				dummy = ApplyGeneticCodeTable (codeTableMatrix[counter]);
-				ModelMatrixDimension = 0;
-			}
-			DataSetFilter filteredData = CreateFilter (ds,3,"","",GeneticCodeExclusions);
-		}
-		else
-		{
-			DataSetFilter filteredData = CreateFilter (ds,1);
-		}
-	}
-	else
-	{
-		fullParameterCount = res[1][1];
-	}
 	singleFileResults [counter-1] = res[1][0];
 	fprintf (stdout,"\nFile ",stringMatrix[counter-1]," : ln-likelihood = ",res[1][0]);
 	if (counter==1)
@@ -204,6 +161,27 @@ for (counter = 1; counter<= fileCount; counter += 1) {
 	{
 		fprintf (tabulatedFileName,",",res[0][counter3]);
 	}
+	DeleteObject (lf, :shallow);
+	if (counter<fileCount) {
+		DataSet ds = ReadDataFile (stringMatrix[counter]);
+		if (dataType)
+		{
+			if (codeTables)
+			{
+				ApplyGeneticCodeTable (codeTableMatrix[counter]);
+				ModelMatrixDimension = 0;
+			}
+			DataSetFilter filteredData = CreateFilter (ds,3,"","",GeneticCodeExclusions);
+		}
+		else
+		{
+			DataSetFilter filteredData = CreateFilter (ds,1);
+		}
+	}
+	else
+	{
+		fullParameterCount = res[1][1];
+	}
 }
 
 /*OPTIMIZATION_PRECISION = OPTIMIZATION_PRECISION*10;*/
@@ -222,17 +200,17 @@ fprintf(stdout,"\n\n In the summary table below");
 
 fprintf (stdout,"\n\n (*)   corresponds to the .05 significance level\n (**)  corresponds to the .01 significance level\n (***) corresponds to the .001 significance level.\n\n");
 
-separator = "+--------+--------+--------------+--------------+";
-fprintf (stdout,separator,"\n| File 1 | File 2 |      LRT     |    P-Value   |\n",separator);
+separator = "+--------+--------+--------------+--------------+--------------+";
+fprintf (stdout,separator,"\n| File 1 | File 2 |      LRT     |   Rel.Ratio  |    P-Value   |\n",separator);
 
-for (counter = 0; counter< fileCount; counter = counter+1) {
+for (counter = 0; counter< fileCount; counter += 1) {
     DataSet ds = ReadDataFile (stringMatrix[counter]);
 
 	if (dataType)
 	{
 		if (codeTables)
 		{
-			dummy = ApplyGeneticCodeTable (codeTableMatrix[counter]);
+			ApplyGeneticCodeTable (codeTableMatrix[counter]);
 			ModelMatrixDimension = 0;
 		}
 		DataSetFilter filteredData = CreateFilter (ds,3,"","",GeneticCodeExclusions);
@@ -269,7 +247,7 @@ for (counter = 0; counter< fileCount; counter = counter+1) {
 		{
 			if (codeTables)
 			{
-				dummy = ApplyGeneticCodeTable (codeTableMatrix[counter2]);
+				ApplyGeneticCodeTable (codeTableMatrix[counter2]);
 				ModelMatrixDimension = 0;
 			}
 			DataSetFilter filteredData2 = CreateFilter (ds2,3,"","",GeneticCodeExclusions);
@@ -297,53 +275,49 @@ for (counter = 0; counter< fileCount; counter = counter+1) {
 				Model RRmodel2 = (modelMatrix2,vectorOfFrequencies2,MULTIPLY_BY_FREQS);
 			}
 		}
+		
 
 		Tree secondFileTree = treeString;
+	
 
 		ReplicateConstraint (constraintString,secondFileTree,firstFileTree);
-
 		LikelihoodFunction lfConstrained = (filteredData2,secondFileTree,filteredData,firstFileTree);
-
 		Optimize (res1,lfConstrained);
 
 		LRT = 2*(singleFileResults[counter]+singleFileResults[counter2]-res1[1][0]);
 
+        /*
+        Export (lfE, lfConstrained);
+        fprintf (stdout, lfE, "\n");
+        */
+
 		degFDiff = 2*fullParameterCount-res1[1][1];
 
-		if (LRT>0)
-		{
+		if (LRT>0) {
 			pValue = 1.0-CChi2(LRT,degFDiff);
 		}
-		else
-		{
+		else {
 			pValue = 1;
 			fprintf (MESSAGE_LOG,"\nA negative LRT statistic encoutered. You may want to increase the optimization precision settings to resolve numerical apporximation errors");
 		}
 
-		fprintf (stdout," | ",Format (LRT,12,5)," | ", Format (pValue,12,8)," |");
+		fprintf (stdout," | ",Format (LRT,12,5)," | ",Format (RelRatio,12,5)," | ", Format (pValue,12,8)," |");
 
-		if (pValue<0.05)
-		{
-			if (pValue<0.01)
-			{
-				if (pValue<0.001)
-				{
+		if (pValue<0.05) {
+			if (pValue<0.01) {
+				if (pValue<0.001) {
 				    fprintf (stdout," (***) ");
 				}
-				else
-				{
+				else {
 			     	fprintf (stdout," (**) ");
 			    }
-			}
-			else
-			{
+			} else {
 			     fprintf (stdout," (*) ");
 			}
 		}
 
-		if ((counter==0)&&(counter2==1))
-		{
-			fprintf (tabulatedFileName,"\n\nRelative Ratio Tests\n\nFile 1,File 2,Ln-Likelihood,LRT,pValue");
+		if ((counter==0)&&(counter2==1)) {
+			fprintf (tabulatedFileName,"\n\nRelative Ratio Tests\n\nFile 1,File 2,Ln-Likelihood,LRT,RelRatio,pValue");
 			dataDimension = Columns(res1);
 			for (counter3 = 0; counter3 < dataDimension; counter3=counter3+1)
 			{
@@ -352,12 +326,13 @@ for (counter = 0; counter< fileCount; counter = counter+1) {
 			}
 		}
 
-		fprintf (tabulatedFileName,"\n",Format(counter+1,0,0),",",Format(counter2+1,0,0),",",res1[1][0],",",LRT,",",pValue);
-		for (counter3 = 0; counter3 < dataDimension; counter3=counter3+1)
-		{
+		fprintf (tabulatedFileName,"\n",Format(counter+1,0,0),",",Format(counter2+1,0,0),",",res1[1][0],",",LRT,",",RelRation,",",pValue);
+		for (counter3 = 0; counter3 < dataDimension; counter3=counter3+1) {
 			fprintf (tabulatedFileName,",",res1[0][counter3]);
 		}
+		DeleteObject (lfConstrained, :shallow);
 	}
+	
 }
 
 fprintf (stdout,"\n",separator,"\n");


=====================================
res/TemplateBatchFiles/SelectionAnalyses/RELAX.bf
=====================================
@@ -920,6 +920,9 @@ function relax.FitMainTestPair () {
 			} else {
 				parameters.SetRange (model.generic.GetGlobalParameter (relax.model_object_map ["relax.test"] , terms.relax.k), terms.relax.k_range1);
 			}
+			
+			//assert (__SIGTRAP__);
+						
 			relax.alternative_model.fit.take2 =  estimators.FitLF (relax.filter_names, relax.trees, { "0" : relax.model_map},
 																   relax.alternative_model.fit ,
 																   relax.model_object_map,
@@ -927,6 +930,7 @@ function relax.FitMainTestPair () {
 																   );
 
 
+            
 
 			if (relax.alternative_model.fit.take2 [terms.fit.log_likelihood] > relax.alternative_model.fit[terms.fit.log_likelihood]) {
 
@@ -947,6 +951,7 @@ function relax.FitMainTestPair () {
 				relax.alternative_model.fit = relax.alternative_model.fit.take2;
 			}
 
+            DeleteObject (relax.alternative_model.fit.take2);
 
 			parameters.SetRange (model.generic.GetGlobalParameter (relax.model_object_map ["relax.test"] , terms.relax.k), terms.relax.k_range);
 
@@ -995,8 +1000,7 @@ function relax.FitMainTestPair () {
 
 	io.ReportProgressMessageMD ("RELAX", "null", "Fitting the null (K := 1) model");
     
-
-
+    
 	for (relax.k = 1; relax.k < relax.numbers_of_tested_groups; relax.k += 1) {
 		relax.model_nmsp = relax.model_namespaces[relax.k ];
 		if (relax.k > 1) {
@@ -1006,7 +1010,6 @@ function relax.FitMainTestPair () {
 		}
 	}
 
-
 	relax.null_model.fit = estimators.FitExistingLF (relax.alternative_model.fit[terms.likelihood_function], relax.model_object_map);
 	io.ReportProgressMessageMD ("RELAX", "null", "* " + selection.io.report_fit (relax.null_model.fit, 9, relax.codon_data_info[terms.data.sample_size]));
 	relax.LRT = math.DoLRT (relax.null_model.fit[terms.fit.log_likelihood], relax.alternative_model.fit[terms.fit.log_likelihood],  relax.numbers_of_tested_groups-1);


=====================================
res/TemplateBatchFiles/SelectionAnalyses/modules/shared-load-file.bf
=====================================
@@ -103,8 +103,10 @@ function load_file (prefix) {
     utility.SetEnvVariable(utility.getGlobalValue ("terms.trees.data_for_neighbor_joining"),
                            codon_data_info[utility.getGlobalValue("terms.data.datafilter")]);
 
+
     partitions_and_trees = trees.LoadAnnotatedTreeTopology.match_partitions (codon_data_info[utility.getGlobalValue("terms.data.partitions")], name_mapping);
 
+
     utility.SetEnvVariable(utility.getGlobalValue ("terms.trees.data_for_neighbor_joining"), None);
 
         /**  this will return a dictionary of partition strings and trees; one set per partition, as in
@@ -155,7 +157,8 @@ function load_file (prefix) {
     } else {
         selected_branches = selection.io.defineBranchSets(partitions_and_trees);
     }
-
+    
+ 
     // Place in own attribute called `tested`
      selection.io.json_store_key_value_pair (json, None, utility.getGlobalValue("terms.json.tested"), selected_branches);
 
@@ -216,8 +219,10 @@ function load_file (prefix) {
  }
 
 function store_tree_information () {
-    // Place in own attribute called `tested`
 
+    // Place in own attribute called `tested`
+    
+ 
      selection.io.json_store_key_value_pair (json, None, utility.getGlobalValue("terms.json.tested"), selected_branches);
 
         /**  this will return a dictionary of selected branches; one set per partition, like in
@@ -255,6 +260,7 @@ function store_tree_information () {
     }
 
 
+
     selection.io.json_store_key_value_pair (json, None, utility.getGlobalValue("terms.json.partitions"),
                                                          filter_specification);
      trees = utility.Map (partitions_and_trees, "_partition_", '_partition_[terms.data.tree]');
@@ -263,11 +269,31 @@ function store_tree_information () {
      filter_names = utility.Map (filter_specification, "_partition_", '_partition_[terms.data.name]');
 
      /* Store original name mapping */
-     for (partition_index = 0; partition_index < partition_count; partition_index += 1) {
-
-        selection.io.json_store_branch_attribute(json, utility.getGlobalValue ("terms.original_name"), utility.getGlobalValue ("terms.json.node_label"), display_orders[utility.getGlobalValue ("terms.original_name")],
-                                         partition_index,
-                                         name_mapping);
+     
+     if (None != name_mapping) {
+         name_mapping_upper_case = {};
+         for (i,n; in; name_mapping) {
+             name_mapping_upper_case[i&&1] = n;
+         }
+
+         for (partition_index = 0; partition_index < partition_count; partition_index += 1) {
+        
+            local_name_mapping = {};
+            for (l, label; in; (trees[partition_index])[utility.getGlobalValue ("terms.trees.partitioned")]) {
+                if (label == ^"terms.tree_attributes.leaf") {
+                    if (name_mapping / l) {
+                        local_name_mapping [l] = name_mapping [l];
+                    } else {
+                        local_name_mapping [l] = name_mapping_upper_case [l];
+                    }
+                }
+            }
+        
+    
+            selection.io.json_store_branch_attribute(json, utility.getGlobalValue ("terms.original_name"), utility.getGlobalValue ("terms.json.node_label"), display_orders[utility.getGlobalValue ("terms.original_name")],
+                                             partition_index,
+                                             local_name_mapping);
+        }
     }
 
 


=====================================
res/TemplateBatchFiles/TemplateModels/GY94.ibf
=====================================
@@ -1,8 +1,7 @@
 /* defines a sparse transition probabilities matrix 
  now we'll go through the matrix and assign the elements based on syn/non-syn status*/
 
-function PopulateModelMatrix (ModelMatrixName&, EFV)
-{
+function PopulateModelMatrix (ModelMatrixName&, EFV) {
 	ModelMatrixName = {ModelMatrixDimension,ModelMatrixDimension}; 
 
 	hshift = 0;


=====================================
res/TemplateBatchFiles/TemplateModels/GY94.mdl
=====================================
@@ -18,8 +18,7 @@
 
 ModelMatrixDimension = 0;
 
-if (!ModelMatrixDimension)
-{
+if (!ModelMatrixDimension) {
 	ModelMatrixDimension = 64;
 	for (h = 0 ;h<64; h=h+1)
 	{
@@ -30,8 +29,7 @@ if (!ModelMatrixDimension)
 	}
 }
 
-function BuildCodonFrequencies (obsF)
-{
+function BuildCodonFrequencies (obsF) {
 	PIStop = 1.0;
 	result = {ModelMatrixDimension,1};
 	hshift = 0;
@@ -87,14 +85,9 @@ if (modelType>0)
 
 #include "GY94.ibf";
 
-
 GY94 = 0;
-
 vectorOfFrequencies = BuildCodonFrequencies (observedFreq);
-
 MULTIPLY_BY_FREQS = PopulateModelMatrix ("GY94", observedFreq);
-
 FREQUENCY_SENSITIVE = 1;
-
 Model GY94model = (GY94,vectorOfFrequencies,1);
 


=====================================
res/TemplateBatchFiles/files.lst
=====================================
@@ -108,4 +108,4 @@
 
 "","Perform relative ratio tests.","!Relative Ratio";
 "RRT","Use relative ratio test on 2 datasets and a variety of standard models","RelativeRatio.bf";
-"PRRTI","Given a list of files (and optinally genetic code tables), perform relative ratio tests on all possible pair of the data files.","PairwiseRelativeRatio.bf";
+"PRRTI","Given a list of files (and optionally genetic code tables), perform relative ratio tests on all possible pair of the data files.","PairwiseRelativeRatio.bf";


=====================================
res/TemplateBatchFiles/libv3/UtilityFunctions.bf
=====================================
@@ -1074,3 +1074,10 @@ function utility.FinishAndPrintProfile (_hyphy_profile_dump) {
     }
 }
 
+function utility.TrapAllErrors (command) {
+
+    LAST_HBL_EXECUTION_ERROR = "";
+    ExecuteCommands ("SetParameter(HBL_EXECUTION_ERROR_HANDLING,1,0);" + command);
+    return LAST_HBL_EXECUTION_ERROR;
+}
+


=====================================
res/TemplateBatchFiles/libv3/convenience/regexp.bf
=====================================
@@ -115,7 +115,7 @@ lfunction regexp.PartitionByRegularExpressions(strings, rex) {
         }
     }
 
-
+    
     return result;
 }
 


=====================================
res/TemplateBatchFiles/libv3/tasks/alignments.bf
=====================================
@@ -828,26 +828,6 @@ lfunction alignments.Extract_site_patterns (data_filter) {
         
         
     }
-
-    /*utility.ForEachPair (pattern_list, "_site_index_", "_pattern_",
-        '
-        utility.EnsureKey (`&site_info`, _pattern_);
-        utility.EnsureKey (`&site_info`[_pattern_], utility.getGlobalValue("terms.data.sites"));
-
-        (`&site_info`[_pattern_])[utility.getGlobalValue("terms.data.sites")] + _site_index_[1];
-
-        if (Abs ((`&site_info`[_pattern_])[utility.getGlobalValue("terms.data.sites")]) == 1) {
-            // first time we see this site
-            GetDataInfo (`&site_characters`, `data_filter`, -1, _pattern_);
-            `&site_characters` = utility.Filter (`&site_characters`,
-                                                 "_value_",
-                                                 "(+_value_>0)");
-
-            (`&site_info`[_pattern_])[^"terms.data.is_constant"] = Abs (`&site_characters`) <= 1;
-
-        }
-        '
-    );*/
     
     
 


=====================================
res/TemplateBatchFiles/libv3/tasks/trees.bf
=====================================
@@ -154,6 +154,7 @@ lfunction trees.GetTreeString(look_for_newick_tree) {
 
 
             if (regexp.Find(treeString, "^#NEXUS")) {
+            
                 ExecuteCommands(treeString);
 
                 if (!utility.GetEnvVariable("IS_TREE_PRESENT_IN_DATA")) {
@@ -207,8 +208,6 @@ lfunction trees.GetTreeString(look_for_newick_tree) {
         }
     }
 
-
-
     return
     {
         utility.getGlobalValue("terms.data.file"): look_for_newick_tree,
@@ -255,6 +254,7 @@ lfunction trees.LoadAnnotatedTopology(look_for_newick_tree) {
  */
 lfunction trees.LoadAnnotatedTopologyAndMap(look_for_newick_tree, mapping) {
 
+
     reverse = {};
 
     for (k,v; in; mapping) {
@@ -447,6 +447,7 @@ lfunction trees.ExtractTreeInfoFromTopology(topology_object) {
 
     branch_lengths = BranchLength(^topology_object, -1);
     branch_names   = BranchName(^topology_object, -1);
+        
     branch_count   = Max (2,utility.Array1D (branch_names) - 1);
     
     


=====================================
res/TemplateBatchFiles/readIndexFile.bf
=====================================
@@ -5,8 +5,7 @@ fscanf 			(PROMPT_FOR_FILE,"Lines",inLines);
 
 counter	= Columns (inLines);
 
-if (skipCodeSelectionStep)
-{
+if (skipCodeSelectionStep) {
 	stringMatrix 	= {counter$2,1};
 	codeTableMatrix = {counter$2,1};
 	counter  = 0;
@@ -17,7 +16,6 @@ if (skipCodeSelectionStep)
 		stringMatrix 	[k$2] = inLines[k];
 	}
 }
-else
-{
+else {
 	stringMatrix = Transpose (inLines);
 }


=====================================
src/core/associative_list.cpp
=====================================
@@ -88,6 +88,9 @@ BaseRef _AssociativeList::makeDynamic (void) const {
     return newAL;
 }
 
+
+//bool _debug_memory_leak = false;
+
 //_____________________________________________________________________________________________
 
 bool _AssociativeList::ParseStringRepresentation (_String& serialized_form, _FormulaParsingContext& fpc ) {
@@ -104,8 +107,10 @@ bool _AssociativeList::ParseStringRepresentation (_String& serialized_form, _For
             _ElementaryCommand::ExtractConditions (*(_String*)splitKeys(k), 0, key_value_pair, ':' , false);
             if (key_value_pair.countitems() == 2UL) {
                 
+                //_debug_memory_leak = true;
                 _String  key        (compute_keys_values ? ProcessLiteralArgument((_String*)key_value_pair(0),theP) : *(_String*)key_value_pair(0));
-              
+                //_debug_memory_leak = false;
+                
                 if (key.empty()) {
                   key = *(_String*)key_value_pair(0);
                 }


=====================================
src/core/batchlan.cpp
=====================================
@@ -1068,9 +1068,7 @@ void    _ExecutionList::ReportAnExecutionError (_String errMsg, bool doCurrentCo
 
 //____________________________________________________________________________________
 void    _ExecutionList::StartProfile (void) {
-    if (profileCounter) {
-        DeleteObject (profileCounter);
-    }
+    DeleteObject (profileCounter);
     profileCounter= new _Matrix (lLength, 2, false, true);
     doProfile = 1;
 }
@@ -1283,7 +1281,7 @@ _StringBuffer const       _ExecutionList::GenerateHelpMessage(_AVLList * scanned
             if (this_command->code == HY_HBL_COMMAND_FORMULA) {
                 _List      hbl_functions;
                 _AVLListX other_functions (&hbl_functions);
-                this_command->BuildListOfDependancies(other_functions, true, *this);
+                this_command->BuildListOfDependancies(other_functions, true, *this, true);
                 
                 for (AVLListXIteratorKeyValue function_iterator : AVLListXIterator (&other_functions)) {
                     _String * function_name = (_String *)other_functions.Retrieve (function_iterator.get_index());
@@ -3269,7 +3267,7 @@ void      _ElementaryCommand::ExecuteCase52 (_ExecutionList& chain) {
 }
 
 
-
+//extern bool _debug_memory_leak;
 
 //____________________________________________________________________________________
 
@@ -3362,12 +3360,24 @@ bool      _ElementaryCommand::Execute    (_ExecutionList& chain) {
                 indepA.ReorderList();
                 depA.ReorderList();
             }
-
+            
+ 
             //indep.Sort();
             //dep.Sort();
 
             holder.Union (indep,dep);
             leftOverVars.Sort ();
+            /*
+            BufferToConsole("\nIndependents+nDependendts\n");
+            ObjectToConsole(&holder); NLToConsole();
+            BufferToConsole("\nLeftover\n");
+            ObjectToConsole(&leftOverVars); NLToConsole();
+            */
+            
+            /*leftOverVars.Each ([](long v, unsigned long) -> void {
+                StringToConsole(*LocateVar(v)->GetName()); NLToConsole();
+            });*/
+            
             indep.Subtract (leftOverVars,holder);
 
             /* the bit with freeSlots is here b/c
@@ -3458,6 +3468,10 @@ bool      _ElementaryCommand::Execute    (_ExecutionList& chain) {
           }
           else{
             //printf ("Return compiled %d\n", ((_Formula*)simpleParameters(1))->GetList().lLength);
+              //if (_debug_memory_leak) {
+              //    BufferToConsole("In return while parsing AssociateList string repr\n");
+              //}
+
             ret_val = ((_Formula*)simpleParameters(1))->Compute(0,nil,nil,nil,HY_ANY_OBJECT,false);
           }
 


=====================================
src/core/batchlan2.cpp
=====================================
@@ -795,7 +795,7 @@ void      _ElementaryCommand::ExecuteCase58 (_ExecutionList& chain)
 {
     chain.currentCommand++;
 
-    const _String kStart ("START"),
+   static  const _String kStart ("START"),
                   kPause ("PAUSE"),
                   kResume ("RESUME");
     
@@ -1179,7 +1179,7 @@ _String const _HYHBLTypeToText (long type) {
 //____________________________________________________________________________________
 
 
-void _ElementaryCommand::ScanStringExpressionForHBLFunctions (_String* expression, _ExecutionList const& chain, bool recursive, _AVLListX& collection ) {
+void _ElementaryCommand::ScanStringExpressionForHBLFunctions (_String* expression, _ExecutionList const& chain, bool recursive, _AVLListX& collection, bool help_mode) {
   
   _Formula f, f2;
   
@@ -1190,8 +1190,8 @@ void _ElementaryCommand::ScanStringExpressionForHBLFunctions (_String* expressio
   long     parseCode = Parse(&f,*expression,fpc,&f2);
   
   if (parseCode != HY_FORMULA_FAILED ) {
-    f.ScanFormulaForHBLFunctions (collection, recursive);
-    f2.ScanFormulaForHBLFunctions(collection, recursive);
+    f.ScanFormulaForHBLFunctions (collection, recursive, !help_mode);
+    f2.ScanFormulaForHBLFunctions(collection, recursive, !help_mode);
   }
 
   
@@ -1199,7 +1199,7 @@ void _ElementaryCommand::ScanStringExpressionForHBLFunctions (_String* expressio
 
 //____________________________________________________________________________________
 
-void      _ElementaryCommand::BuildListOfDependancies    (_AVLListX & collection, bool recursive, _ExecutionList const & chain) {
+void      _ElementaryCommand::BuildListOfDependancies    (_AVLListX & collection, bool recursive, _ExecutionList const & chain, bool help_mode) {
   
   switch (code) {
       
@@ -1208,7 +1208,7 @@ void      _ElementaryCommand::BuildListOfDependancies    (_AVLListX & collection
     case 14:
     {
       if (parameters.lLength) {
-        ScanStringExpressionForHBLFunctions((_String*)parameters (0), chain, recursive, collection);
+        ScanStringExpressionForHBLFunctions((_String*)parameters (0), chain, recursive, collection, help_mode);
       }
       break;
     }


=====================================
src/core/batchlanruntime.cpp
=====================================
@@ -53,6 +53,7 @@
 
 #include      "function_templates.h"
 
+#include      <signal.h>
 
 #ifndef __HYPHY_NO_SQLITE__
   #include "sqlite3.h"
@@ -1515,7 +1516,7 @@ bool      _ElementaryCommand::HandleReplicateConstraint (_ExecutionList& current
                     }
                 };
 
-                if (reference_iteratee->HasLocals()) { // stuff to do
+                if (reference_iteratee->HasLocals() && !traversers[reference_argument]->IsAtRoot()) { // stuff to do
                     _List parameter_sets,
                           matched_subexpressions;
                     for (unsigned long i = 0UL; i < template_parameter_count; i++) {
@@ -2079,9 +2080,16 @@ bool      _ElementaryCommand::HandleGetURL(_ExecutionList& current_program){
 
 bool      _ElementaryCommand::HandleAssert (_ExecutionList& current_program) {
   current_program.advance();
+  static const _String kBreakpointTrap = ("__SIGTRAP__");
 
   try {
     _Formula parsed_expression;
+      
+    if (kBreakpointTrap == * GetIthParameter(0) ) {
+      raise(SIGTRAP);
+      return true;
+    }
+      
     _CheckExpressionForCorrectness (parsed_expression, *GetIthParameter(0UL), current_program, NUMBER);
     if (CheckEqual (parsed_expression.Compute()->Value (), 0.0)) { // assertion failed
       bool soft_assertions = hy_env::EnvVariableTrue(hy_env::assertion_behavior);


=====================================
src/core/category.cpp
=====================================
@@ -177,7 +177,7 @@ void _CategoryVariable::Construct (_List& parameters, _VariableContainer *theP)
                     !CheckEqual (iSplitter->GetMaxX(),1.0) ||
                     *theName == splitterName ||
                     (intervals = iSplitter->GetNumberOfIntervals()+1) < 2) {
-                HandleApplicationError (errorMsg & _String("Category variables which specify interval splitting options must be supported on [0,1], and not result in circular dependance"));
+                HandleApplicationError (errorMsg & _String("Category variables which specify interval splitting options must be supported on [0,1], and not result in circular dependence"));
                 return;
             }
 
@@ -475,7 +475,7 @@ void _CategoryVariable::Construct (_List& parameters, _VariableContainer *theP)
         }
     }
 
-    // disallow category -> category dependance
+    // disallow category -> category dependence
     for (long i=0; i<scannedVarsList.lLength; i++) {
         _Variable * curVar = (_Variable*)variablePtrs (scannedVarsList.list_data[i]);
         if (curVar->IsCategory()) {
@@ -489,7 +489,7 @@ void _CategoryVariable::Construct (_List& parameters, _VariableContainer *theP)
     hiddenMarkovModel = HY_NO_MODEL;
 
     parameterList.Duplicate  (&scannedVarsList);
-    // finally go thru all the variables and put them where they belong in dependance containers
+    // finally go thru all the variables and put them where they belong in dependence containers
 
     _SimpleList     exclude;
 


=====================================
src/core/constant.cpp
=====================================
@@ -724,6 +724,7 @@ HBLObjectRef _Constant::Time (HBLObjectRef cache) {
     //return     result;
 }
 
+
 //__________________________________________________________________________________
 HBLObjectRef _Constant::Less (HBLObjectRef theObj, HBLObjectRef cache) {
    return _check_type_and_compute (theObj, [] (hyFloat a, hyFloat b) -> hyFloat {return a < b;}, cache);


=====================================
src/core/formula.cpp
=====================================
@@ -2494,7 +2494,7 @@ bool _Formula::HasChanged (bool ingoreCats) {
 
 //__________________________________________________________________________________
 
-void _Formula::ScanFormulaForHBLFunctions (_AVLListX& collection , bool recursive) {
+void _Formula::ScanFormulaForHBLFunctions (_AVLListX& collection , bool recursive, bool simplify) {
 
 
   auto handle_function_id = [&collection, recursive] (const long hbl_id) -> void {
@@ -2514,7 +2514,9 @@ void _Formula::ScanFormulaForHBLFunctions (_AVLListX& collection , bool recursiv
 
   if (theTree) {
 
-    InternalSimplify(theTree);
+    if (simplify) {
+        InternalSimplify(theTree);
+    }
     node_iterator<long> ni (theTree, _HY_TREE_TRAVERSAL_PREORDER);
 
     while (node<long>* iterator = ni.Next()) {


=====================================
src/core/global_object_lists.cpp
=====================================
@@ -121,7 +121,7 @@ namespace hyphy_global_objects {
             //lf->Rebuild();
                /* 20170328 SLKP: this COULD MODIFY the 'listeners' object, hence the buffering */
           } else if (event_type == kNotificationTypeDelete) {
-            hy_global::HandleApplicationError (_String("Attempted to delete a data set filter ") & GetFilterName(index)->Enquote() & " which is still being referenced by a likelihood function ");
+            hy_global::HandleApplicationError (_String("Attempted to delete a data set filter ") & GetFilterName(index)->Enquote() & " which is referenced by the likelihood function " &  GetObjectNameByType (HY_BL_LIKELIHOOD_FUNCTION, FindLikeFuncIndex (lf), false)->Enquote());
           }
         }
       }
@@ -824,6 +824,11 @@ namespace hyphy_global_objects {
         return nil;
     }
     
+    //____________________________________________________________________________________
+    long    FindLikeFuncIndex (void * const lfp) {
+        return likeFuncList._SimpleList::Find ((long)lfp);
+    }
+
     //____________________________________________________________________________________
     long    FindSCFGName (_String const&s)
     {


=====================================
src/core/global_things.cpp
=====================================
@@ -121,7 +121,7 @@ namespace hy_global {
                      kErrorStringDatasetRefIndexError ("Dataset index reference out of range"),
                      kErrorStringMatrixExportError    ("Export matrix called with a non-polynomial matrix argument"),
                      kErrorStringNullOperand          ("Attempting to operate on an undefined value; this is probably the result of an earlier 'soft' error condition"),
-                     kHyPhyVersion  = _String ("2.5.28"),
+                     kHyPhyVersion  = _String ("2.5.29"),
     
                     kNoneToken = "None",
                     kNullToken = "null",


=====================================
src/core/include/batchlan.h
=====================================
@@ -386,7 +386,7 @@ public:
   
     bool              DecompileFormulae        (void);
   
-    void              BuildListOfDependancies  (_AVLListX & collection, bool recursive, _ExecutionList const& chain);
+    void              BuildListOfDependancies  (_AVLListX & collection, bool recursive, _ExecutionList const& chain, bool help_mode = false);
     
     
     
@@ -405,7 +405,7 @@ public:
 
 protected:
   
-    static    void ScanStringExpressionForHBLFunctions (_String*, _ExecutionList const&, bool, _AVLListX& );
+    static    void ScanStringExpressionForHBLFunctions (_String*, _ExecutionList const&, bool, _AVLListX& , bool help_mode = false);
 
     _String  *   GetIthParameter       (unsigned long i, bool range_check = true) const {
         BaseRef p = parameters.GetItemRangeCheck(i);


=====================================
src/core/include/formula.h
=====================================
@@ -247,7 +247,7 @@ public:
     static      _Formula*        PatchFormulasTogether (const _Formula& op1, const _Formula& op2, const char op_code);
     static      _Formula*        PatchFormulasTogether (const _Formula& op1, HBLObjectRef op2, const char op_code);
     
-    void        ScanFormulaForHBLFunctions (_AVLListX& collection , bool recursive);
+    void        ScanFormulaForHBLFunctions (_AVLListX& collection , bool recursive, bool simplify = true);
   
   
     /** A compute and forget utility function.


=====================================
src/core/include/global_object_lists.h
=====================================
@@ -298,6 +298,7 @@ namespace hyphy_global_objects {
     // added by afyp, March 18, 2007
 
     long      FindLikeFuncName                (_String const&, bool = false);
+    long      FindLikeFuncIndex               (void* const p);
     long      FindModelName                   (_String const&);
 
     extern   _AVLListX batchLanguageFunctionNamesIndexed;


=====================================
src/core/include/variable.h
=====================================
@@ -92,7 +92,7 @@ public:
     void        SetFormula (_Formula&); // set the variable to a new formula
 
     void   ClearValue (void) {
-        if (varValue) { delete (varValue); varValue = nil;}
+        if (varValue) { DeleteObject (varValue); varValue = nil;}
     }
 
     const     _Formula * get_constraint (void) const {


=====================================
src/core/likefunc.cpp
=====================================
@@ -2382,20 +2382,20 @@ bool        _LikelihoodFunction::HasBlockChanged(long index) const {
 
 //_______________________________________________________________________________________
 
-void      _LikelihoodFunction::RecurseConstantOnPartition (long blockIndex, long index, long dependance, long highestIndex, hyFloat weight, _Matrix& cache) {
+void      _LikelihoodFunction::RecurseConstantOnPartition (long blockIndex, long index, long dependence, long highestIndex, hyFloat weight, _Matrix& cache) {
     // SLKP 20210102: TODO this needs to be reviewed and confirmed as working.
     _CategoryVariable* thisC = (_CategoryVariable*)LocateVar(indexCat.list_data[index]);
 
     if (index<highestIndex) {
-        if ((!CheckNthBit(dependance,index))||thisC->is_hidden_markov()) {
-            RecurseCategory (blockIndex, index+1, dependance,highestIndex,weight);
+        if ((!CheckNthBit(dependence,index))||thisC->is_hidden_markov()) {
+            RecurseCategory (blockIndex, index+1, dependence,highestIndex,weight);
         } else {
             thisC->Refresh();
             long nI = thisC->GetNumberOfIntervals ();
             offsetCounter *= nI;
             for (long k = 0; k<nI; k++) {
                 thisC->SetIntervalValue(k);
-                RecurseConstantOnPartition(blockIndex,index+1,dependance, highestIndex,weight*thisC->GetIntervalWeight(k),cache);
+                RecurseConstantOnPartition(blockIndex,index+1,dependence, highestIndex,weight*thisC->GetIntervalWeight(k),cache);
                 categID+=offsetCounter/nI;
             }
             offsetCounter/=nI;
@@ -2450,7 +2450,7 @@ void      _LikelihoodFunction::RecurseConstantOnPartition (long blockIndex, long
 
 //_______________________________________________________________________________________
 
-void      _LikelihoodFunction::RecurseCategory(long blockIndex, long index, long dependance, long highestIndex, hyFloat weight
+void      _LikelihoodFunction::RecurseCategory(long blockIndex, long index, long dependence, long highestIndex, hyFloat weight
 #ifdef _SLKP_LFENGINE_REWRITE_
         ,_SimpleList* siteMultipliers, char runMode, hyFloat *runStorage,
         long branchIndex,              _SimpleList* branchValues
@@ -2459,8 +2459,8 @@ void      _LikelihoodFunction::RecurseCategory(long blockIndex, long index, long
 {
     _CategoryVariable* thisC = (_CategoryVariable*)LocateVar(indexCat.list_data[index]);
     if (index<highestIndex) {
-        if ((!CheckNthBit(dependance,index))||thisC->is_hidden_markov())
-            RecurseCategory (blockIndex, index+1, dependance,highestIndex,weight
+        if ((!CheckNthBit(dependence,index))||thisC->is_hidden_markov())
+            RecurseCategory (blockIndex, index+1, dependence,highestIndex,weight
 #ifdef _SLKP_LFENGINE_REWRITE_
                              ,siteMultipliers,runMode,runStorage
 #endif
@@ -2471,7 +2471,7 @@ void      _LikelihoodFunction::RecurseCategory(long blockIndex, long index, long
             offsetCounter *= nI;
             for (long k = 0; k<nI; k++) {
                 thisC->SetIntervalValue(k);
-                RecurseCategory(blockIndex,index+1,dependance, highestIndex,weight*thisC->GetIntervalWeight(k)
+                RecurseCategory(blockIndex,index+1,dependence, highestIndex,weight*thisC->GetIntervalWeight(k)
 #ifdef _SLKP_LFENGINE_REWRITE_
                                 ,siteMultipliers,runMode,runStorage,branchIndex,branchValues
 #endif
@@ -2698,7 +2698,7 @@ void    _LikelihoodFunction::CheckDependentBounds (void) {
     {
         _Matrix     dependancies (MAX(3,indexDep.lLength),indexInd.lLength,true,true);
         
-        // element (i,j) represents the dependance of i-th dep var on the j-th ind var
+        // element (i,j) represents the dependence of i-th dep var on the j-th ind var
         // 0 - no dep,
         // 1 -> monotone increase,
         // -1 -> monotone decrease
@@ -5350,7 +5350,7 @@ long    _LikelihoodFunction::Bracket (long index, hyFloat& left, hyFloat& middle
         while (middle-leftStep <= lowerBound) {
             if (verbosity_level > 100) {
               char buf [512];
-              snprintf (buf, sizeof(buf), "\n\t[_LikelihoodFunction::Bracket (index %ld) HANDLING LEFT BOUNDARY CASES] : LB = %g, current try = %.16g, current evaluated midpoint value = %.16g (%s)", index, lowerBound, middle-leftStep, middleValue, first ? "first" : "NOT first");
+              snprintf (buf, sizeof(buf), "\n\t[_LikelihoodFunction::Bracket (index %ld) HANDLING LEFT BOUNDARY CASES] : initial = %g, LB = %g, current try = %.16g, current evaluated midpoint value = %.16g (%s)", index, initialStep, lowerBound, middle-leftStep, middleValue, first ? "first" : "NOT first");
               BufferToConsole (buf);
             }
 
@@ -5396,6 +5396,9 @@ long    _LikelihoodFunction::Bracket (long index, hyFloat& left, hyFloat& middle
                     middle=lowerBound+2.*leftStep;
                     first = false;
                 }
+            } else {
+                middleValue = SetParametersAndCompute (index, middle, &currentValues, gradient);
+                return -2;
             }
         }
 
@@ -7678,7 +7681,7 @@ void    _LikelihoodFunction::ScanAllVariables (void) {
 
     avl.ReorderList();
 
-    if (templateKind<0) { // remove dependance of the function from the HMM model specifier
+    if (templateKind<0) { // remove dependence of the function from the HMM model specifier
         allVariables.Delete (allVariables.Find(-templateKind-1));
         rankVariables.Delete ((BaseRef)(-templateKind-1));
     }


=====================================
src/core/operation.cpp
=====================================
@@ -231,8 +231,10 @@ _Operation::_Operation  (bool isVar, _String& stuff, bool isG, _VariableContaine
         long f;
         _String theS (stuff);
         if (theParent) {
+            // SLKP 20210223 : why is this here
+            // if a global variable already exists, ignore the context?
             f = LocateVarByName(theS);
-
+            
             if (f>=0L && !FetchVar(f)->IsGlobal()) {
                 f = -1L;
             }


=====================================
src/core/parser.cpp
=====================================
@@ -380,10 +380,13 @@ void       UpdateChangingFlas (_SimpleList const & involvedVariables) {
 void DeleteVariable (long dv, bool deleteself, bool do_checks) {
     if (dv>=0L) {
 
+ 
         _String *name   = (_String*)variableNames.Retrieve (dv);
         _String my_name = *name&'.';
+        
         long    vidx    = variableNames.GetXtra (dv);
 
+
         UpdateChangingFlas (vidx);
 
         _SimpleList recCache;
@@ -547,7 +550,7 @@ void DeleteTreeVariable (long tree_variable_index, _SimpleList & parms, _String
         
         indices_to_delete.Each ([] (long var_idx, unsigned long) -> void {
             _Variable * delvar = LocateVar (var_idx);
-            //printf ("Deleting variable %s\n" , delvar->GetName()->get_str());
+            //printf ("DeleteTree::Deleting variable %s\n" , delvar->GetName()->get_str());
             if (delvar->ObjectClass() != TREE) {
                 variableNames.Delete (delvar->GetName(),true);
                 (*((_SimpleList*)&variablePtrs))[delvar->get_index()]=0;
@@ -671,12 +674,12 @@ bool CheckReceptacleAndStore (_String name, _String fID, bool checkValid, HBLObj
 
 //__________________________________________________________________________________
 void  InsertVar (_Variable* theV) {
-        
+    
+    
     long pos = variableNames.Insert (theV->theName);
-
-    if (pos < 0 && isDefiningATree == kTreeNodeBeingCreated)
+    
+    if (pos < 0 && isDefiningATree == kTreeNodeBeingCreated) {
         // automatically fix duplicate autogenerated tree node name
-    {
         long trySuffix  = 1;
         _String * tryName = new _String;
         do {



View it on GitLab: https://salsa.debian.org/med-team/hyphy/-/commit/6884fa66cf5d542d45a116543c13c6757e02b6ef

-- 
View it on GitLab: https://salsa.debian.org/med-team/hyphy/-/commit/6884fa66cf5d542d45a116543c13c6757e02b6ef
You're receiving this email because of your account on salsa.debian.org.


-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://alioth-lists.debian.net/pipermail/debian-med-commit/attachments/20210226/975d44ad/attachment-0001.htm>


More information about the debian-med-commit mailing list